current status

This commit is contained in:
2014-12-05 12:28:51 +01:00
parent 4501747a44
commit 967678e2b2
8 changed files with 70796 additions and 2679 deletions
BIN
View File
Binary file not shown.
+493 -53
View File
@@ -1,72 +1,512 @@
source("functions.R") user <- as.character(acc_df$screenname[a])
setwd("~/Dokumente/Uni/Aktuell/BA-Arbeit/uni-ba-issuecomp") name <- as.character(acc_df$name[a])
require(jsonlite)
require(stringr)
require(RCurl)
require(devtools)
require(RTwitterAPI)
source("functions.R")
setwd("~/Dokumente/Uni/Aktuell/BA-Arbeit/uni-ba-issuecomp")
api_params <- c(
"oauth_consumer_key" = "c9Ob2fWNSONMC0mA2JlNaeRke",
"oauth_nonce" = NA,
"oauth_signature_method" = "HMAC-SHA1",
"oauth_timestamp" = NA,
"oauth_token" = "1007025684-RFxCDFc4OPkt02bASmdci00TB4jgaPjfqxLRT58",
"oauth_version" = "1.0",
"consumer_secret" = "cZ3Il2hmbLgK0Lc57mj5kUvymjVdsmZKYwKOGHR3NhCpvWgEOI",
"oauth_token_secret" = "rvfv8MgexFKTqrPNSoGrdrZVNhV4fTJb2Bgz249nbvKNg"
)
api_url <- "https://api.twitter.com/1.1/statuses/user_timeline.json";
# api_url <- "https://api.twitter.com/1.1/statuses/show.json";
user <- "peteraltmaier"
max_count <- "200"
max_id <- "999999999999999999" max_id <- "999999999999999999"
loop <- 1 loop <- 1
keep <- c("created_at", "id_str", "text", "retweet_count")
rm(tweets_full, tweets_temp)
repeat { repeat {
# Define specific search query
query <- c(include_rts=1, exclude_replies="true", trim_user="true", include_entities="false", query <- c(include_rts=1, exclude_replies="true", trim_user="true", include_entities="false",
screen_name=user, screen_name=user,
count=max_count, count=max_count,
max_id=max_id); max_id=max_id);
# query <- c(trim_user="true", include_entities="false", # At first, work with an temporary tweet-DB
# id="431858659656990721");
if(exists("tweets_full")) {
current <- twitter_api_call(api_url, query, api_params) current <- twitter_api_call(api_url, query, api_params)
tweets_temp <- fromJSON(current) tweets_temp <- fromJSON(correctJSON(current))
tweets_temp <- tweets_temp[keep] ## STAT ERROR HANDLING ##
tweets_full <- insertRow(tweets_full, tweets_temp) # Check for empty API returns
} status <- length(tweets_temp)
else { if(status == 0) {
current <- twitter_api_call(api_url, query, api_params) cat("[WARNING] Empty API result. Trying again.\n")
tweets_full <- fromJSON(correctJSON(current))
tweets_full <- tweets_full[keep]
}
status <- str_detect(tweets_full$created_at[nrow(tweets_full)], "2013$")
if (status) {
rm(tweets_temp) rm(tweets_temp)
for(r in 1:nrow(tweets_full)) { Sys.sleep(3)
status <- str_detect(tweets_full$created_at[r], "2013$") next
if(is.na(status)) { status <- FALSE } }
# Check if API output contains error fields
status <- "error" %in% names(tweets_temp)
if(status) { if(status) {
tweets_full <- tweets_full[-r,] cat("[WARNING] Error in API request:", tweets_temp$error[1],"\n")
} rm(tweets_temp)
}
break break
} }
# Check for other errors, mostly rate limits
status <- "errors" %in% names(tweets_temp)
if(status) {
cat("[WARNING] Error in API request:", tweets_temp$errors[1,1],"\n")
# Rate limit exceeded?
status <- tweets_temp$errors[1,2]
if(status == 88) {
rate_api_url <- "https://api.twitter.com/1.1/application/rate_limit_status.json"
rate_query <-c (resources="statuses")
resettime <- fromJSON(twitter_api_call(rate_api_url, rate_query, api_params))
resettime <- resettime$resources$statuses$`/statuses/user_timeline`$reset
curtime <- as.numeric(as.POSIXct(Sys.time()))
wait <- round(resettime - curtime + 10)
cat("[INFO] Rate limit is exceeded. Now waiting",wait,"seconds.\n")
Sys.sleep(wait)
}
rm(tweets_temp)
Sys.sleep(3)
next
}
## END ERROR HANDLING ##
# Delete unnecessary columns and add username and real name to dataframe
tweets_temp <- tweets_temp[keep]
tweets_temp <- cbind(user=user, name=name, tweets_temp)
# Now sleep 3 second to dodge 300queries/15min limit
cat("[",a,"/",nrow(acc_df),"] ", sep = "")
cat("User: ",user," in loop: ",loop,". \n", sep = "")
Sys.sleep(2)
if(tweets_full$id_str[nrow(tweets_full)] == tweets_temp$id_str[nrow(tweets_temp)] && nrow(tweets_full) > 0) {
cat("[INFO] Last tweet of temp is last tweet of full. Abort loop and begin with next user.\n")
break
}
# Is the last tweet in tweets_temp from 2013?
status <- str_detect(tweets_temp$created_at[nrow(tweets_temp)], "2014$")
# Last loop is reached. Now clear the data frame
if (!status) { # Starting when tweet not from 2014
# Delete all tweets other than from 2014
old <- 0
for(r in 1:nrow(tweets_temp)) {
status <- str_detect(tweets_temp$created_at[r], "2014$")
if(is.na(status)) {
#status <- FALSE
cat("[INFO] NA-Status in Tweet", r)
}
if(!status) { # Starting when tweet not from 2014
old <- old + 1
}
}
if(old > 0) {
old <- old - 1
# If even the first entry isn't from 2014, we have to set "old" manually because of a bug
status <- str_detect(tweets_temp$created_at[1], "2014$")
if(!status) {
old <- nrow(tweets_temp)
cat("[INFO] Timeline enhält keinen einzigen aus 2014\n")
}
# delete all lines which are older than 2014
tweets_temp <- head(tweets_temp, -old)
}
rm(old)
tweets_full <- insertRow(tweets_full, tweets_temp)
rm(tweets_temp)
break # End loop because 2013 is reached
}
# The last tweet is still from 2014, so we need another loop
else { else {
max_id <- as.character(as.numeric(tweets_full$id_str[nrow(tweets_full)]) - 1) # Setting max_id to gather next 200 tweets
loop <- loop + 1 max_id <- tweets_temp$id_str[nrow(tweets_temp)]
loop <- loop + 1 # just for stats
tweets_full <- insertRow(tweets_full, tweets_temp)
rm(tweets_temp)
} }
} # /repeat
stat_tweet <- nrow(tweets_full)
cat("User:",user,"finished after",loop,"loops. Total Tweets now:",nrow(tweets_full),"\n")
write.csv(tweets_full, "tweets_full.csv")
# Every tweet from 2014 from user[r] is downloaded. Now next user in for-loop
} }
status
content
current
query
tweets_full$text[59203]
tweets_full$text[59202]
tweets_full$text[59203]
tweets_full$user[59203]
tweets_full$user[59202]
# Download every 2014 tweet for each account in acc_df, resuming at row 157
# (earlier rows were presumably finished in a previous run — TODO confirm).
# For each user, page backwards through the timeline via max_id until a
# pre-2014 tweet is reached; results accumulate in tweets_full and are
# checkpointed to CSV after every user.
for (a in 157:nrow(acc_df)) {
  user <- as.character(acc_df$screenname[a])
  name <- as.character(acc_df$name[a])
  # Start above any real tweet id so the first page returns the newest tweets
  max_id <- "999999999999999999"
  loop <- 1
  error <- 0
  repeat {
    # Define the timeline query for the current page
    query <- c(include_rts = 1, exclude_replies = "true", trim_user = "true",
               include_entities = "false",
               screen_name = user,
               count = max_count,
               max_id = max_id)
    # Fetch one page into a temporary data frame first
    current <- twitter_api_call(api_url, query, api_params)
    tweets_temp <- fromJSON(correctJSON(current))
    ## START ERROR HANDLING ##
    # Empty API result: retry up to three times, then give up on this user.
    if (length(tweets_temp) == 0) {
      # FIX: this branch previously compared `error > 3` and did not break,
      # so a persistently empty timeline warned forever without aborting.
      if (error > 2) {
        cat("[WARNING] 3x empty API result. Aborting now.\n")
        break
      }
      cat("[WARNING] Empty API result. Trying again.\n")
      rm(tweets_temp)
      error <- error + 1
      Sys.sleep(3)
      next
    }
    # A single "error" field in the API output: unrecoverable for this user
    if ("error" %in% names(tweets_temp)) {
      cat("[WARNING] Error in API request:", tweets_temp$error[1],"\n")
      rm(tweets_temp)
      break
    }
    # An "errors" table: mostly rate limits; wait until the limit resets
    if ("errors" %in% names(tweets_temp)) {
      cat("[WARNING] Error in API request:", tweets_temp$errors[1,1],"\n")
      # Rate limit exceeded? (Twitter error code 88)
      if (tweets_temp$errors[1,2] == 88) {
        rate_api_url <- "https://api.twitter.com/1.1/application/rate_limit_status.json"
        rate_query <- c(resources = "statuses")
        # Ask the rate-limit endpoint when the user_timeline window resets
        resettime <- fromJSON(twitter_api_call(rate_api_url, rate_query, api_params))
        resettime <- resettime$resources$statuses$`/statuses/user_timeline`$reset
        curtime <- as.numeric(as.POSIXct(Sys.time()))
        # Wait until the reset time plus a 10-second safety margin
        wait <- round(resettime - curtime + 10)
        cat("[INFO] Rate limit is exceeded. Now waiting",wait,"seconds.\n")
        Sys.sleep(wait)
      }
      rm(tweets_temp)
      Sys.sleep(3)
      next
    }
    ## END ERROR HANDLING ##
    # Keep only the columns in `keep` and prepend screen name / real name
    tweets_temp <- tweets_temp[keep]
    tweets_temp <- cbind(user = user, name = name, tweets_temp)
    # Progress output; sleep to dodge the 300 queries / 15 min limit
    cat("[",a,"/",nrow(acc_df),"] ", sep = "")
    cat("User: ",user," in loop: ",loop,". \n", sep = "")
    Sys.sleep(2)
    # Stop when the new page ends with the same tweet as the accumulated frame.
    # FIX: the nrow() guard must run FIRST — with an empty tweets_full the
    # id_str comparison yields logical(0) and `&&` fails.
    if (nrow(tweets_full) > 0 &&
        tweets_full$id_str[nrow(tweets_full)] == tweets_temp$id_str[nrow(tweets_temp)]) {
      cat("[INFO] Last tweet of temp is last tweet of full. Abort loop and begin with next user.\n")
      break
    }
    # Is the last tweet of this page still from 2014?
    status <- str_detect(tweets_temp$created_at[nrow(tweets_temp)], "2014$")
    if (!status) { # Last page reached: trim tweets that are not from 2014
      # Count trailing tweets that are older than 2014
      old <- 0
      for (r in 1:nrow(tweets_temp)) {
        status <- str_detect(tweets_temp$created_at[r], "2014$")
        if (is.na(status)) {
          # FIX: leaving status as NA crashed the following if(!status) with
          # "missing value where TRUE/FALSE needed"; treat NA as "not 2014".
          cat("[INFO] NA-Status in Tweet", r)
          status <- FALSE
        }
        if (!status) { # counts every tweet that is not from 2014
          old <- old + 1
        }
      }
      if (old > 0) {
        old <- old - 1
        # If even the first entry isn't from 2014, set "old" manually (bug workaround)
        status <- str_detect(tweets_temp$created_at[1], "2014$")
        if (!status) {
          old <- nrow(tweets_temp)
          cat("[INFO] Timeline enhält keinen einzigen aus 2014\n")
        }
        # Drop the `old` trailing (pre-2014) rows
        tweets_temp <- head(tweets_temp, -old)
      }
      rm(old)
      tweets_full <- insertRow(tweets_full, tweets_temp)
      rm(tweets_temp)
      break # End loop because pre-2014 tweets were reached
    }
    # The last tweet is still from 2014, so fetch another (older) page
    else {
      # NOTE(review): max_id is inclusive, so this tweet is fetched again on
      # the next page; the duplicate is only caught when it is the sole
      # remaining tweet — confirm whether max_id - 1 was intended.
      max_id <- tweets_temp$id_str[nrow(tweets_temp)]
      loop <- loop + 1 # just for stats
      tweets_full <- insertRow(tweets_full, tweets_temp)
      rm(tweets_temp)
    }
  } # /repeat
  stat_tweet <- nrow(tweets_full)
  cat("User:",user,"finished after",loop,"loops. Total Tweets now:",nrow(tweets_full),"\n")
  write.csv(tweets_full, "tweets_full.csv")
  # Every 2014 tweet of this user is downloaded; continue with the next user
}
# Download every 2014 tweet for each account in acc_df, resuming at row 157
# (earlier rows were presumably finished in a previous run — TODO confirm).
# Pages backwards through each user's timeline via max_id until a pre-2014
# tweet is reached; results accumulate in tweets_full and are checkpointed
# to CSV after every user.
for(a in 157:nrow(acc_df)) {
user <- as.character(acc_df$screenname[a])
name <- as.character(acc_df$name[a])
# Start above any real tweet id so the first page returns the newest tweets
max_id <- "999999999999999999"
loop <- 1
error <- 0
repeat {
# Define the timeline query for the current page
query <- c(include_rts=1, exclude_replies="true", trim_user="true", include_entities="false",
screen_name=user,
count=max_count,
max_id=max_id);
# Fetch one page into a temporary data frame first
current <- twitter_api_call(api_url, query, api_params)
tweets_temp <- fromJSON(correctJSON(current))
## STAT ERROR HANDLING ##
# Empty API result: retry (at most 3 times in total) before giving up
status <- length(tweets_temp)
if(status == 0) {
if(error > 2) {
cat("[WARNING] 3x empty API result. Aborting now.\n")
break
}
cat("[WARNING] Empty API result. Trying again.\n")
rm(tweets_temp)
error <- error + 1
Sys.sleep(3)
next
}
# A single "error" field in the API output: unrecoverable for this user
status <- "error" %in% names(tweets_temp)
if(status) {
cat("[WARNING] Error in API request:", tweets_temp$error[1],"\n")
rm(tweets_temp)
break
}
# An "errors" table: mostly rate limits; wait until the limit resets
status <- "errors" %in% names(tweets_temp)
if(status) {
cat("[WARNING] Error in API request:", tweets_temp$errors[1,1],"\n")
# Rate limit exceeded? (Twitter error code 88 — TODO confirm column order)
status <- tweets_temp$errors[1,2]
if(status == 88) {
rate_api_url <- "https://api.twitter.com/1.1/application/rate_limit_status.json"
rate_query <-c (resources="statuses")
# Ask the rate-limit endpoint when the user_timeline window resets
resettime <- fromJSON(twitter_api_call(rate_api_url, rate_query, api_params))
resettime <- resettime$resources$statuses$`/statuses/user_timeline`$reset
curtime <- as.numeric(as.POSIXct(Sys.time()))
# Wait until the reset time plus a 10-second safety margin
wait <- round(resettime - curtime + 10)
cat("[INFO] Rate limit is exceeded. Now waiting",wait,"seconds.\n")
Sys.sleep(wait)
}
rm(tweets_temp)
Sys.sleep(3)
next
}
## END ERROR HANDLING ##
# Keep only the columns in `keep` and prepend screen name / real name
tweets_temp <- tweets_temp[keep]
tweets_temp <- cbind(user=user, name=name, tweets_temp)
# Progress output; sleep to dodge the 300 queries / 15 min limit
cat("[",a,"/",nrow(acc_df),"] ", sep = "")
cat("User: ",user," in loop: ",loop,". \n", sep = "")
Sys.sleep(2)
# Stop when the new page ends with the same tweet as the accumulated frame.
# NOTE(review): the nrow(tweets_full) > 0 guard is evaluated AFTER the
# indexing expression; with an empty tweets_full the id_str comparison
# yields logical(0) and `&&` fails — consider reordering the conditions.
if(tweets_full$id_str[nrow(tweets_full)] == tweets_temp$id_str[nrow(tweets_temp)] && nrow(tweets_full) > 0) {
cat("[INFO] Last tweet of temp is last tweet of full. Abort loop and begin with next user.\n")
break
}
# Is the last tweet of this page still from 2014?
status <- str_detect(tweets_temp$created_at[nrow(tweets_temp)], "2014$")
# Last page reached: trim the tweets that are not from 2014
if (!status) { # Starting when tweet not from 2014
# Count trailing tweets that are older than 2014
old <- 0
for(r in 1:nrow(tweets_temp)) {
status <- str_detect(tweets_temp$created_at[r], "2014$")
if(is.na(status)) {
#status <- FALSE
# NOTE(review): status is left as NA here, so the following if(!status)
# would raise "missing value where TRUE/FALSE needed" — confirm intent.
cat("[INFO] NA-Status in Tweet", r)
}
if(!status) { # counts every tweet that is not from 2014
old <- old + 1
}
}
if(old > 0) {
old <- old - 1
# If even the first entry isn't from 2014, set "old" manually (bug workaround)
status <- str_detect(tweets_temp$created_at[1], "2014$")
if(!status) {
old <- nrow(tweets_temp)
# (German: "timeline does not contain a single tweet from 2014")
cat("[INFO] Timeline enhält keinen einzigen aus 2014\n")
}
# Drop the `old` trailing (pre-2014) rows
tweets_temp <- head(tweets_temp, -old)
}
rm(old)
tweets_full <- insertRow(tweets_full, tweets_temp)
rm(tweets_temp)
break # End loop because pre-2014 tweets were reached
}
# The last tweet is still from 2014, so fetch another (older) page
else {
# Set max_id to the oldest tweet seen to page further back.
# NOTE(review): max_id is inclusive, so this tweet is fetched again on
# the next page — confirm whether max_id - 1 was intended.
max_id <- tweets_temp$id_str[nrow(tweets_temp)]
loop <- loop + 1 # just for stats
tweets_full <- insertRow(tweets_full, tweets_temp)
rm(tweets_temp)
}
} # /repeat
stat_tweet <- nrow(tweets_full)
cat("User:",user,"finished after",loop,"loops. Total Tweets now:",nrow(tweets_full),"\n")
write.csv(tweets_full, "tweets_full.csv")
# Every 2014 tweet of this user is downloaded; continue with the next user
}
current
correctJSON(current)
correctJSON <- function(string) {
  # Sanitize a raw Twitter API response so that fromJSON() can parse it.
  # Applied in order: blank out non-printable characters, blank out short
  # HTML entities such as &amp; / &gt;, then blank out any backslash escape
  # that is not \t, \n or \".
  fixes <- c(
    "[^[:print:]]",
    "&..;"
  )
  for (fx in fixes) {
    string <- str_replace_all(string, pattern = fx, replacement = " ")
  }
  string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
  return(string)
}
correctJSON(current)
a
tweets_temp <- fromJSON(correctJSON(current)) tweets_temp <- fromJSON(correctJSON(current))
current current
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = "\\\\\\\\\\", replacement = "\\\\\\")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\\\(?![tn"])'), replacement = " ")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\\\\\(?![tn"])'), replacement = " ")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\\\\\\\'), replacement = " ")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = "[\]{5}", replacement = " ")
return(string)
}
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = "[\\]{5}", replacement = " ")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = "[\\]{6}", replacement = " ")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\n", replacement = " ")
# string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = "\\{6}", replacement = " ")
return(string)
}
tweets_temp <- fromJSON(correctJSON(current))
correctJSON <- function(string) {
  # Sanitize a raw Twitter API response so that fromJSON() can parse it:
  # blank out non-printables, short HTML entities, and stray backslashes.
  # Earlier attempts, kept for reference:
  # string <- str_replace_all(string, pattern = "\n", replacement = " ")
  # string <- str_replace_all(string, pattern = "\r", replacement = " ")
  # string <- str_replace_all(string, pattern = "\\^", replacement = " ")
  string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
  # Remove short HTML entities such as &amp; / &gt;
  string <- str_replace_all(string, pattern = "&..;", replacement = " ")
  # Blank out any backslash escape that is not \t, \n or \"
  string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
  # NOTE(review): "\\{5}" is the regex \{5}, which matches the literal text
  # "{5}" — NOT a run of five backslashes ("[\\\\]{5}" would). Confirm intent.
  string <- str_replace_all(string, pattern = "\\{5}", replacement = " ")
  return(string)
}
insertRow <- function(existingDF, newrow, r) {
  # Append `newrow` (one or more rows) to the end of `existingDF` and
  # renumber the row names 1..n. `r` is accepted for backward compatibility
  # but ignored — the original body overwrote it immediately anyway.
  #
  # FIX: the previous order(c(1:(nrow-1), r-0.5)) shuffle was only a no-op
  # for a single-row `newrow`; with a multi-row `newrow` (how this is called
  # with pages of up to 200 tweets) it moved the LAST appended row in front
  # of its siblings, scrambling the chronological order the max_id
  # pagination relies on. It also failed on an empty `existingDF`
  # (1:nrow gives c(1, 0) when nrow is 0). Plain rbind preserves order
  # and handles both cases.
  existingDF <- rbind(existingDF, newrow)
  row.names(existingDF) <- seq_len(nrow(existingDF))
  return(existingDF)
}
tweets_temp <- fromJSON(correctJSON(current))
str_detect(current, pattern = "\\")
str_detect(current, pattern = '\\')
str_detect(current, pattern = "\\\")
str_detect(current, pattern = "[\\]")
str_detect(current, pattern = "[\\\\]")
str_detect(current, pattern = "[\\\\\\]")
str_detect(current, pattern = "[\\\\\\\\]")
str_detect(current, pattern = "[\\\\\\\\\\\\]")
str_detect(current, pattern = "[\\\\\\\\\\\\\\]")
str_detect(current, pattern = "[\\]{1}")
str_detect(current, pattern = "[\\]{7}")
str_detect(current, pattern = "[\\]{2}")
str_detect(current, pattern = "[\\]{3}")
str_detect(current, pattern = "[\\]{4}")
str_detect(current, pattern = "[\\]{5}")
current current
rm(tweets_full, api_params, api_url, keep, loop, max_count, max_id, status, user, insertRow, correctJSON2) str_detect(current, pattern = "[\\]{5}")
query str_detect(current, pattern = "[\]{5}")
api_url <- "https://api.twitter.com/1.1/statuses/user_timeline.json"; str_detect(current, pattern = "[\\]{2}")
save.image("~/Dokumente/Uni/Aktuell/BA-Arbeit/uni-ba-issuecomp/RData.RData") grep("[\\]", current)
load("/home/max/Dokumente/Uni/Aktuell/BA-Arbeit/uni-ba-issuecomp/RData.RData") grep("[\\]{2}", current)
grep("[\\]{3}", current)
grep("[\\]{4}", current)
grep("[\\]{5}", current)
grep("[\\]{1}", current)
str_detect(current, pattern = fixed("\\\\\"))
str_detect(current, pattern = fixed("\\\\\\\\\\\"))
str_detect(current, pattern = fixed("\\\\\\\\\\"))
str_detect(current, pattern = fixed("\\\\\\\\"))
str_detect(current, pattern = fixed("\\\\\\"))
str_detect(current, pattern = fixed("\\\\\"))
str_detect(current, pattern = fixed("\\\\"))
str_detect(current, pattern = fixed("\\"))
nchar("\\")
nchar("\\\\")
nchar("\\\\\\")
nchar("\\\\\\\\")
nchar("\\\\\\\\\\")
print("\\\\\\\\\\")
print("\\\\\")
print("\\\\\\")
print("\\\\")
print("\\\\\")
write(current, "current.txt")
current2 <- readLines("current.txt")
current
current2
rm(current2)
a
BIN
View File
Binary file not shown.
+3
View File
File diff suppressed because one or more lines are too long
+37 -3
View File
@@ -39,15 +39,17 @@ api_params <- c(
"oauth_token_secret" = "rvfv8MgexFKTqrPNSoGrdrZVNhV4fTJb2Bgz249nbvKNg" "oauth_token_secret" = "rvfv8MgexFKTqrPNSoGrdrZVNhV4fTJb2Bgz249nbvKNg"
) )
api_url <- "https://api.twitter.com/1.1/statuses/user_timeline.json";
api_url <- "https://api.twitter.com/1.1/statuses/user_timeline.json";
max_count <- "200" max_count <- "200"
keep <- c("created_at", "id_str", "text", "retweet_count") keep <- c("created_at", "id_str", "text", "retweet_count")
tweets_full <- data.frame(user=character(), name=character(), created_at=character(), id_str=character(), text=character(), retweet_count=character()) tweets_full <- data.frame(user=character(), name=character(), created_at=character(), id_str=character(), text=character(), retweet_count=character())
for(a in 1:nrow(acc_df)) { for(a in 157:nrow(acc_df)) {
user <- as.character(acc_df$screenname[a]) user <- as.character(acc_df$screenname[a])
name <- as.character(acc_df$name[a]) name <- as.character(acc_df$name[a])
max_id <- "999999999999999999" max_id <- "999999999999999999"
loop <- 1 loop <- 1
error <- 0
repeat { repeat {
# Define specific search query # Define specific search query
query <- c(include_rts=1, exclude_replies="true", trim_user="true", include_entities="false", query <- c(include_rts=1, exclude_replies="true", trim_user="true", include_entities="false",
@@ -59,11 +61,19 @@ for(a in 1:nrow(acc_df)) {
current <- twitter_api_call(api_url, query, api_params) current <- twitter_api_call(api_url, query, api_params)
tweets_temp <- fromJSON(correctJSON(current)) tweets_temp <- fromJSON(correctJSON(current))
## STAT ERROR HANDLING ##
# Check for empty API returns
status <- length(tweets_temp) status <- length(tweets_temp)
if(status == 0) { if(status == 0) {
if(error > 2) {
cat("[WARNING] 3x empty API result. Aborting now.\n")
break
}
cat("[WARNING] Empty API result. Trying again.\n") cat("[WARNING] Empty API result. Trying again.\n")
rm(tweets_temp) rm(tweets_temp)
Sys.sleep(2) error <- error + 1
Sys.sleep(3)
next next
} }
@@ -75,6 +85,30 @@ for(a in 1:nrow(acc_df)) {
break break
} }
# Check for other errors, mostly rate limits
status <- "errors" %in% names(tweets_temp)
if(status) {
cat("[WARNING] Error in API request:", tweets_temp$errors[1,1],"\n")
# Rate limit exceeded?
status <- tweets_temp$errors[1,2]
if(status == 88) {
rate_api_url <- "https://api.twitter.com/1.1/application/rate_limit_status.json"
rate_query <-c (resources="statuses")
resettime <- fromJSON(twitter_api_call(rate_api_url, rate_query, api_params))
resettime <- resettime$resources$statuses$`/statuses/user_timeline`$reset
curtime <- as.numeric(as.POSIXct(Sys.time()))
wait <- round(resettime - curtime + 10)
cat("[INFO] Rate limit is exceeded. Now waiting",wait,"seconds.\n")
Sys.sleep(wait)
}
rm(tweets_temp)
Sys.sleep(3)
next
}
## END ERROR HANDLING ##
# Delete unnecessary columns and add username and real name to dataframe # Delete unnecessary columns and add username and real name to dataframe
tweets_temp <- tweets_temp[keep] tweets_temp <- tweets_temp[keep]
tweets_temp <- cbind(user=user, name=name, tweets_temp) tweets_temp <- cbind(user=user, name=name, tweets_temp)
+2 -10
View File
@@ -6,7 +6,9 @@ correctJSON <- function(string) {
# string <- str_replace_all(string, pattern = "\r", replacement = " ") # string <- str_replace_all(string, pattern = "\r", replacement = " ")
# string <- str_replace_all(string, pattern = "\\^", replacement = " ") # string <- str_replace_all(string, pattern = "\\^", replacement = " ")
string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ") string <- str_replace_all(string, pattern = "[^[:print:]]", replacement = " ")
string <- str_replace_all(string, pattern = "&..;", replacement = " ")
string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ") string <- str_replace_all(string, pattern = perl('\\\\(?![tn"])'), replacement = " ")
string <- str_replace_all(string, pattern = "\\{5}", replacement = " ")
return(string) return(string)
} }
@@ -17,13 +19,3 @@ insertRow <- function(existingDF, newrow, r) {
row.names(existingDF) <- 1:nrow(existingDF) row.names(existingDF) <- 1:nrow(existingDF)
return(existingDF) return(existingDF)
} }
# mergeIfExists <- function(fulldf, tempdf) {
# if(exists(fulldf)) {
# fulldf <- insertRow(fulldf, tempdf)
# }
# else {
# fulldf <- tempdf
# }
# return(fulldf)
# }
Binary file not shown.
+70261 -2613
View File
File diff suppressed because it is too large Load Diff