dump.php · executable file · 145 lines (123 loc) · 3.97 KB
#!/usr/bin/env php
<?php
require_once __DIR__.'/vendor/autoload.php';
date_default_timezone_set('UTC');
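
// Overview: reads rows from db.sqlite (tables twitter_tweets and twitter_media,
// queried below) and appends each tweet as an <s> record to
// /media/data1/dumped/twitter/<lang>/<year>/<Ymd>.txt. Requires the bcmath
// extension and the composer dependencies pulled in above; progress goes to STDERR.
// Typical invocation (a sketch): ./dump.php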

// Snowflake <-> Unix-timestamp helpers, adapted from
// https://github.com/client9/snowflake2time
// A snowflake ID stores milliseconds since the Twitter epoch (1288834974657)
// in its upper bits, shifted left by 22 (i.e. multiplied by 4194304).

// Smallest snowflake ID minted at the given Unix timestamp (seconds).
function utc2snowflake($stamp) {
    bcscale(0);
    return bcmul(bcsub(bcmul($stamp, 1000), '1288834974657'), '4194304');
}

// Unix timestamp (seconds) encoded in a snowflake ID.
function snowflake2utc($sf) {
    bcscale(0);
    return bcdiv(bcadd(bcdiv($sf, '4194304'), '1288834974657'), '1000');
}
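
// Illustrative round trip, kept commented out so the dump itself is unchanged
// (the reference IDs further below confirm the 2019-12-01 value):
//   $sf = utc2snowflake(strtotime('2019-12-01 00:00:00 UTC')); // '1200927488209846272'
//   echo snowflake2utc($sf), "\n";                             // 1575158400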
/*
Disabled incremental mode: scan the files already dumped (newest day first) for
the highest tweet ID written so far, then fetch only tweets between that ID and
"now minus one day" via the parameterized query further below.

$maxid = utc2snowflake(time()-86400);
$tid = 0;
for ($stamp = time() ; $tid === 0 ; $stamp -= 86400) {
    $date = date('Ymd', $stamp).'.txt';
    $fs = trim(shell_exec("find /media/data1/dumped/twitter/ -type f -name '$date'"));
    if (empty($fs)) {
        echo "No files for $date\n";
        continue;
    }
    $fs = explode("\n", $fs);
    foreach ($fs as $f) {
        $tail = shell_exec("tail -n 20 '$f' | grep id= | grep tweet= | tail -n 1");
        if (preg_match('~tweet="(\d+)"~', $tail, $m)) {
            $tid = max($tid, intval($m[1]));
        }
    }
}
echo "Will fetch between $tid and $maxid\n";
*/
$db = new \TDC\PDO\SQLite('db.sqlite');
// Second connection so media lookups can run while the tweet cursor below is open.
$db2 = new \TDC\PDO\SQLite('db.sqlite');
$GLOBALS['sel_m'] = $db2->prepare("SELECT media_data FROM twitter_media WHERE media_id = ?");

// Resolve a media entity that is stored as a bare ID into its decoded JSON array.
function fetch_media($mid) {
    $GLOBALS['sel_m']->execute([$mid]);
    $media = json_decode($GLOBALS['sel_m']->fetchColumn(0), true);
    return $media;
}
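
// Hypothetical usage: media entities stored as a bare numeric ID (rather than a
// full entity array) are resolved through this helper, e.g.
//   $m = fetch_media('1200927488209846273'); // decoded media_data array, or null if the ID is unknown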
// utc2snowflake: bcmul(bcsub(bcmul($stamp, 1000), '1288834974657'), '4194304');
// 1145482148967350272 is 2019-07-01 00:00:00 UTC
// 1200927488209846272 is 2019-12-01 00:00:00 UTC
// WHERE tweet_id >= 1200927488209846272
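// If the incremental mode disabled above is re-enabled, switch to the
// parameterized query kept commented out below so that only tweets between
// $tid and $maxid are fetched; the unbounded SELECT re-dumps the whole table.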
//$stm = $db->prepexec("SELECT tweet_data FROM twitter_tweets WHERE tweet_id > :tid AND tweet_id <= :max ORDER BY tweet_id ASC", ['tid' => $tid, 'max' => $maxid]);
$stm = $db->prepexec("SELECT tweet_data FROM twitter_tweets ORDER BY tweet_id ASC");
$datef = new \DateTime('now');
$datef->setTimezone(new DateTimeZone('Europe/Berlin'));
$i = 0;
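
// Each surviving tweet is appended to its per-language, per-day file as a record
// of this shape (values illustrative):
//   <s id="1200927488209846272" tweet="1200927488209846272" lang="en" user="12345"
//      stamp="2019-12-01 00:00:00" lstamp="2019-12-01 01:00:00 CET">
//   tweet text with t.co links replaced by their expanded / media URLs
//   </s>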
for ( ; $row = $stm->fetch() ; ++$i) {
    $t = json_decode($row['tweet_data'], true);
    if (empty($t)) {
        // Some rows were stored with C-style escaping; retry after unescaping.
        $row['tweet_data'] = stripcslashes($row['tweet_data']);
        $t = json_decode($row['tweet_data'], true);
    }
    if (empty($t)) {
        continue;
    }
    // Skip retweets; only original tweets are dumped.
    if (!empty($t['retweeted_status'])) {
        continue;
    }
    // Prefer the longest available text variant.
    $txt = '';
    if (!empty($t['extended_tweet']['full_text'])) {
        $txt = $t['extended_tweet']['full_text'];
    }
    else if (!empty($t['full_text'])) {
        $txt = $t['full_text'];
    }
    else {
        $txt = $t['text'];
    }
    // Replace t.co wrappers with the expanded URLs / direct media URLs.
    if (!empty($t['entities']['urls'])) {
        foreach ($t['entities']['urls'] as $u) {
            $txt = str_replace($u['url'], $u['expanded_url'], $txt);
        }
    }
    if (!empty($t['entities']['media'])) {
        foreach ($t['entities']['media'] as $u) {
            if (!is_array($u)) {
                $u = fetch_media($u);
            }
            $txt = str_replace($u['url'], $u['media_url_https'] ?? $u['media_url'], $txt);
        }
    }
    if (!empty($t['extended_tweet']['entities']['urls'])) {
        foreach ($t['extended_tweet']['entities']['urls'] as $u) {
            $txt = str_replace($u['url'], $u['expanded_url'], $txt);
        }
    }
    if (!empty($t['extended_tweet']['entities']['media'])) {
        foreach ($t['extended_tweet']['entities']['media'] as $u) {
            if (!is_array($u)) {
                $u = fetch_media($u);
            }
            $txt = str_replace($u['url'], $u['media_url_https'] ?? $u['media_url'], $txt);
        }
    }
    if (empty($t['lang'])) {
        $t['lang'] = 'xx';
    }
    // IDs at or below 29700859247 are treated as pre-snowflake (no embedded
    // timestamp), so fall back to created_at for those.
    $utc = snowflake2utc($t['id']);
    if ($t['id'] <= 29700859247) {
        $utc = strtotime($t['created_at']);
    }
    $stamp = date('Y-m-d H:i:s', $utc);
    $datef->setTimestamp($utc);
    $lstamp = $datef->format('Y-m-d H:i:s T');
    $out = '';
    $out .= "<s id=\"{$t['id']}\" tweet=\"{$t['id']}\" lang=\"{$t['lang']}\" user=\"{$t['user']}\" stamp=\"{$stamp}\" lstamp=\"{$lstamp}\">\n";
    $out .= $txt;
    $out .= "\n</s>\n\n";
    // Append under /<lang>/<year>/<Ymd>.txt, creating the directory if needed.
    $date = $datef->format('Ymd');
    $year = substr($date, 0, 4);
    if (!is_dir("/media/data1/dumped/twitter/{$t['lang']}/{$year}")) {
        shell_exec("mkdir -p /media/data1/dumped/twitter/{$t['lang']}/{$year}/");
    }
    file_put_contents("/media/data1/dumped/twitter/{$t['lang']}/{$year}/{$date}.txt", $out, FILE_APPEND);
    if ($i % 1000 == 0) {
        fprintf(STDERR, "Dumped $i ...\r");
    }
}
fprintf(STDERR, "Dumped $i ...\n");
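
// Note: output is appended (FILE_APPEND), so running the script twice against the
// same /media/data1/dumped/twitter/ tree writes every record a second time.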