@@ -1,3 +1,14 @@
require(stringr)
library(foreach)
library(doParallel)
# MATCH TWEETS ------------------------------------------------------------
id_folder <- "matched-ids"
unlink(id_folder, recursive = TRUE)
dir.create(id_folder)
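# Start with an empty "matched-ids" folder for the per-match ID files written below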
issues <- data.frame(date = drange)
issuelist <- readLines("issues.xml")
issuelist <- str_replace_all(string = issuelist, pattern = ".*<!-- .+ -->", "")
issuelist <- xmlToList(issuelist)
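# Read the issue/tag catalogue from issues.xml, dropping XML comments before converting it to a nested list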
issueheads <- names(issuelist)
issues[issueheads] <- 0
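# issues: one row per day in drange, one zero-initialised counter column per issue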
tweets$issue <- ""
@@ -77,8 +88,6 @@ else {
} # /for issuelist
} # /for tweets_curday
} # /for drange
#rm(tweets_curday,curacro, curchars, curdate,curfile,curid,curissue,curtag,curtags,curtext,d,date_end,date_start,i,id_folder,oldissue,oldtag,s,t,tags_found)
stopCluster(cl)
require(lubridate)
require(XML)
require(ggplot2)
@@ -162,7 +171,7 @@ if(tags_found == TRUE) {
# tweets[tweets[, "id_str"] == curid, "tags"] <- str_c(oldtag, curtag, ";")
# Add information to file for function viewPatternMatching
write(str_c(curdate,";\"",curid,"\";",curissue,";",curtag), curfile, append = TRUE)
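# Each match is appended as a semicolon-separated line: date;"tweet id";issue;tag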
cat(paste("Match!\n"), file="issuecomp-analysis.log", append=TRUE)
# cat(paste("Match!\n"), file="issuecomp-analysis.log", append=TRUE)
# data.frame(date=curdate, issue=curissue)
break # next issue, no more tags from same issue
}
@@ -173,6 +182,11 @@ else {
} # /for issuelist
} # /for tweets_curday
} # /for drange
stopCluster(cl)
drange
drange[40]
drange[50]
View(issues)
require(lubridate)
require(XML)
require(ggplot2)
@@ -180,24 +194,12 @@ require(reshape2)
require(stringr)
library(foreach)
library(doParallel)
# MATCH TWEETS ------------------------------------------------------------
id_folder <- "matched-ids"
unlink(id_folder, recursive = TRUE)
dir.create(id_folder)
issues <- data.frame(date = drange)
issuelist <- readLines("issues.xml")
issuelist <- str_replace_all(string = issuelist, pattern = ".*<!-- .+ -->", "")
issuelist <- xmlToList(issuelist)
issueheads <- names(issuelist)
issues[issueheads] <- 0
tweets$issue <- ""
tweets$tags <- ""
tagexpand <- c("", "s", "n", "en", "er", "e")
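# Suffixes used to expand each tag during matching (presumably German inflection endings: -s, -n, -en, -er, -e)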
# Parallelisation
writeLines(c(""), "issuecomp-analysis.log")
drange[70]
drange[80]
drange[90]
cl<-makeCluster(4)
registerDoParallel(cl)
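# Parallel backend with 4 workers; each foreach iteration below handles one day of the date range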
foreach(d = 1:nrow(issues), .packages = c("stringr"), .combine=rbind) %dopar% {
foreach(d = 51:90, .packages = c("stringr"), .combine=rbind) %dopar% {
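# The day indices are apparently run in batches: 51:90 here, 91:120 and 101:nrow(issues) in later runs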
#for(d in 1:nrow(issues)) {
# Go through every day
curdate <- issues$date[d]
@@ -267,24 +269,12 @@ else {
} # /for issuelist
} # /for tweets_curday
} # /for drange
#rm(tweets_curday,curacro, curchars, curdate,curfile,curid,curissue,curtag,curtags,curtext,d,date_end,date_start,i,id_folder,oldissue,oldtag,s,t,tags_found)
stopCluster(cl)
drange
drange[40]
drange[50]
View(issues)
require(lubridate)
require(XML)
require(ggplot2)
require(reshape2)
require(stringr)
library(foreach)
library(doParallel)
drange[70]
drange[80]
drange[90]
drange[121]
cl<-makeCluster(4)
registerDoParallel(cl)
foreach(d = 51:90, .packages = c("stringr"), .combine=rbind) %dopar% {
foreach(d = 91:120, .packages = c("stringr"), .combine=rbind) %dopar% {
#for(d in 1:nrow(issues)) {
# Go through every day
curdate <- issues$date[d]
@@ -354,12 +344,18 @@ else {
} # /for issuelist
} # /for tweets_curday
} # /for drange
#rm(tweets_curday,curacro, curchars, curdate,curfile,curid,curissue,curtag,curtags,curtext,d,date_end,date_start,i,id_folder,oldissue,oldtag,s,t,tags_found)
stopCluster(cl)
drange[121]
drange[102]
require(lubridate)
require(XML)
require(ggplot2)
require(reshape2)
require(stringr)
library(foreach)
library(doParallel)
cl<-makeCluster(4)
registerDoParallel(cl)
foreach(d = 91:120, .packages = c("stringr"), .combine=rbind) %dopar% {
foreach(d = 101:nrow(issues), .packages = c("stringr"), .combine=rbind) %dopar% {
#for(d in 1:nrow(issues)) {
# Go through every day
curdate <- issues$date[d]
@@ -429,8 +425,6 @@ else {
} # /for issuelist
} # /for tweets_curday
} # /for drange
stopCluster(cl)
drange[102]
require(lubridate)
require(XML)
require(ggplot2)
@@ -438,7 +432,7 @@ require(reshape2)
require(stringr)
library(foreach)
library(doParallel)
cl<-makeCluster(4)
cl<-makeCluster(3)
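# Cluster size reduced here from 4 to 3 workers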
registerDoParallel(cl)
foreach(d = 101:nrow(issues), .packages = c("stringr"), .combine=rbind) %dopar% {
#for(d in 1:nrow(issues)) {
@@ -510,3 +504,9 @@ else {
} # /for issuelist
} # /for tweets_curday
} # /for drange
stopCluster(cl)
drange[200]
drange[300]
drange[280]
drange[270]
drange[259]