# uni-ba-socialagenda/extract-twitter-accounts.R
# NOTE(review): web-viewer residue removed here (file-listing header,
# "Raw Normal View History", blame timestamp 2014-11-29 13:52:09 +01:00) —
# those lines were not valid R and prevented the script from parsing.
# Dependencies ----
# library() errors immediately when a package is missing; require() only
# returns FALSE and would let the script fail later with a cryptic error.
library(jsonlite)
library(stringr)
library(RCurl)
library(devtools)
library(RTwitterAPI)

# NOTE(review): setwd() ties the script to one machine; consider an
# RStudio project or relative paths instead.
setwd("~/Dokumente/Uni/Aktuell/BA-Arbeit/uni-ba-issuecomp")

# Project helpers — presumably defines twitter_api_call(), correctJSON()
# and insertRow() used below; verify against functions.R.
source("functions.R")
# Set curl handle for friendly scraping: identify ourselves to the server
# via From and User-Agent headers.
handle <- getCurlHandle(httpheader = list(
  from = "max.mehl@uni.kn",
  "user-agent" = str_c(R.version$version.string)
))

# Fetch the list of politicians' Twitter accounts as a data frame.
acc_url <- "http://www.bundestwitter.de/api/politiker"
#acc_json <- readLines("politiker.txt")
acc_df <- fromJSON(acc_url)
# ---------------
# Twitter REST API v1.1 via RTwitterAPI:
# http://www.joyofdata.de/blog/twitters-rest-api-v1-1-with-r-for-linux-and-windows/
# devtools::install_github("joyofdata/RTwitterAPI")

# OAuth 1.0a credentials; oauth_nonce and oauth_timestamp are left NA and
# presumably filled in per request by twitter_api_call() — confirm.
# SECURITY NOTE(review): secrets are hard-coded in a tracked source file —
# these keys should be revoked and loaded from environment variables or a
# git-ignored config instead.
api_params <- c(
  "oauth_consumer_key" = "c9Ob2fWNSONMC0mA2JlNaeRke",
  "oauth_nonce" = NA,
  "oauth_signature_method" = "HMAC-SHA1",
  "oauth_timestamp" = NA,
  "oauth_token" = "1007025684-RFxCDFc4OPkt02bASmdci00TB4jgaPjfqxLRT58",
  "oauth_version" = "1.0",
  "consumer_secret" = "cZ3Il2hmbLgK0Lc57mj5kUvymjVdsmZKYwKOGHR3NhCpvWgEOI",
  "oauth_token_secret" = "rvfv8MgexFKTqrPNSoGrdrZVNhV4fTJb2Bgz249nbvKNg"
)
api_url <- "https://api.twitter.com/1.1/statuses/user_timeline.json"

user <- "GregorGysi"            # screen name to scrape
max_count <- "200"              # tweets per request (API maximum)
max_id <- "999999999999999999"  # start above any real tweet ID
loop <- 1                       # request counter, used for progress output only
# Columns kept from each API response
keep <- c("created_at", "id_str", "text", "retweet_count")

# Start from a clean slate. Guard rm() with exists() so a fresh session
# does not emit "object 'tweets_full' not found" warnings.
if (exists("tweets_full")) rm(tweets_full)
last_id <- NULL
# Page backwards through the user's timeline, max_count tweets per request,
# accumulating rows in tweets_full until a tweet from 2013 is reached.
repeat {
  # Query for the next page: tweets with IDs at or below max_id.
  query <- c(include_rts = 1, exclude_replies = "true", trim_user = "true",
             include_entities = "false",
             screen_name = user,
             count = max_count,
             max_id = max_id)

  # Fetch one page and keep only the columns of interest.
  # (Hoisted out of the if/else below — both branches made the identical
  # API call, duplicating the request code.)
  current <- twitter_api_call(api_url, query, api_params)
  tweets_temp <- fromJSON(correctJSON(current))
  tweets_temp <- tweets_temp[keep]

  if (exists("tweets_full")) {
    # Append this page to the accumulated data frame
    tweets_full <- insertRow(tweets_full, tweets_temp)
  } else {
    # First loop: this page becomes the accumulator
    tweets_full <- tweets_temp
  }
  rm(tweets_temp)

  # Now sleep 3 seconds to dodge the 300 queries / 15 min rate limit
  cat("User:", user, "in loop:", loop, "- now waiting 3 secs...\n")
  Sys.sleep(3)

  # Is the oldest tweet fetched so far from 2013? created_at ends with the
  # year. isTRUE() treats an NA result as FALSE; the original `if (status)`
  # would error on NA.
  reached_2013 <- str_detect(tweets_full$created_at[nrow(tweets_full)], "2013$")

  if (isTRUE(reached_2013)) {
    # Count tweets from 2013 (vectorized; NA matches count as FALSE,
    # matching the original per-row loop's NA handling).
    from_2013 <- str_detect(tweets_full$created_at, "2013$")
    old <- sum(from_2013, na.rm = TRUE)
    # Trim the trailing 2013 tweets, keeping one of them.
    # NOTE(review): the original kept one 2013 tweet (old - 1) despite the
    # "delete all tweets from 2013" comment — preserved here; confirm intent.
    # BUGFIX: when old == 1 the original computed head(tweets_full, -0),
    # which is head(df, 0) and emptied the entire data frame.
    if (old > 1) {
      tweets_full <- head(tweets_full, -(old - 1))
    }
    rm(old)
    break  # End loop because 2013 is reached
  } else {
    # The oldest tweet is still from 2014, so we need another loop:
    # set max_id to the last fetched ID to get the next (older) page.
    max_id <- tweets_full$id_str[nrow(tweets_full)]
    loop <- loop + 1  # just for stats
  }
}
# ---------------
# NOTE(review): blame-timestamp residue removed here (2014-11-29 21:30:42,
# 2014-11-29 16:11:13) — those lines were not valid R.