├── .gitignore
├── Database
│   └── fb_data.sql
├── LICENSE
├── README.md
├── config.py
├── group_functions.py
├── mysql_functions.py
├── page_functions.py
└── script.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 |
55 | # Sphinx documentation
56 | docs/_build/
57 |
58 | # PyBuilder
59 | target/
60 |
61 | # IPython Notebook
62 | .ipynb_checkpoints
63 |
--------------------------------------------------------------------------------
/Database/fb_data.sql:
--------------------------------------------------------------------------------
1 | -- phpMyAdmin SQL Dump
2 | -- version 4.0.10deb1
3 | -- http://www.phpmyadmin.net
4 | --
5 | -- Host: localhost
6 | -- Generation Time: Jun 06, 2016 at 05:36 AM
7 | -- Server version: 5.5.47-0ubuntu0.14.04.1
8 | -- PHP Version: 5.5.9-1ubuntu4.14
9 |
10 | SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
11 | SET time_zone = "+00:00";
12 |
13 |
14 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
15 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
16 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
17 | /*!40101 SET NAMES utf8 */;
18 |
19 | --
20 | -- Database: `fb_data`
21 | --
22 |
23 | -- --------------------------------------------------------
24 |
25 | --
26 | -- Table structure for table `comments`
27 | --
28 |
29 | CREATE TABLE IF NOT EXISTS `comments` (
30 | `id` int(11) NOT NULL AUTO_INCREMENT,
31 | `time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
32 | `pageid` varchar(200) NOT NULL,
33 | `postid` varchar(200) NOT NULL,
34 | `commentid` varchar(200) NOT NULL,
35 | `message` varchar(10000) NOT NULL,
36 | `fromid` varchar(100) NOT NULL,
37 | `fromname` varchar(100) NOT NULL,
38 | `createdtime` varchar(50) NOT NULL,
39 | `total_likes` int(11) NOT NULL,
40 | PRIMARY KEY (`id`)
41 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=1 ;
42 |
43 | -- --------------------------------------------------------
44 |
45 | --
46 | -- Table structure for table `likes`
47 | --
48 |
49 | CREATE TABLE IF NOT EXISTS `likes` (
50 | `id` int(11) NOT NULL AUTO_INCREMENT,
51 | `time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
52 | `pageid` varchar(200) NOT NULL,
53 | `postid` varchar(200) NOT NULL,
54 | `commentid` varchar(200) DEFAULT NULL,
55 | `replyid` varchar(200) DEFAULT NULL,
56 | `fromname` varchar(200) NOT NULL,
57 | `fromid` varchar(200) NOT NULL,
58 | PRIMARY KEY (`id`)
59 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=1 ;
60 |
61 | -- --------------------------------------------------------
62 |
63 | --
64 | -- Table structure for table `Posts`
65 | --
66 |
67 | CREATE TABLE IF NOT EXISTS `Posts` (
68 | `id` int(11) NOT NULL AUTO_INCREMENT,
69 | `date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
70 | `pageid` varchar(200) NOT NULL,
71 | `postid` varchar(200) NOT NULL,
72 | `message` varchar(10000) NOT NULL,
73 | `fromid` varchar(100) NOT NULL,
74 | `fromname` varchar(100) NOT NULL,
75 | `createdtime` varchar(50) NOT NULL,
76 | `total_likes` int(11) NOT NULL,
77 | PRIMARY KEY (`id`)
78 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=1 ;
79 |
80 | -- --------------------------------------------------------
81 |
82 | --
83 | -- Table structure for table `replys`
84 | --
85 |
86 | CREATE TABLE IF NOT EXISTS `replys` (
87 | `id` int(11) NOT NULL AUTO_INCREMENT,
88 | `time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
89 | `pageid` varchar(200) NOT NULL,
90 | `postid` varchar(200) NOT NULL,
91 | `commentid` varchar(200) NOT NULL,
92 | `replyid` varchar(200) NOT NULL,
93 | `message` varchar(10000) NOT NULL,
94 | `fromid` varchar(100) NOT NULL,
95 | `fromname` varchar(100) NOT NULL,
96 | `createdtime` varchar(50) NOT NULL,
97 | `total_likes` int(11) NOT NULL,
98 | PRIMARY KEY (`id`)
99 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=1 ;
100 |
101 | -- --------------------------------------------------------
102 |
103 | --
104 | -- Table structure for table `status`
105 | --
106 |
107 | CREATE TABLE IF NOT EXISTS `status` (
108 | `id` int(11) NOT NULL AUTO_INCREMENT,
109 | `date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
110 | `pageid` varchar(50) NOT NULL,
111 | `next` varchar(1000) NOT NULL,
112 | PRIMARY KEY (`id`),
113 | UNIQUE KEY `pageid` (`pageid`)
114 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8 AUTO_INCREMENT=1 ;
115 |
116 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
117 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
118 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
119 |
--------------------------------------------------------------------------------
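
Note on the schema above: the `status` table keeps, for each page/group id, the last Graph API `next` paging URL that was reached, which is what lets the scraper resume where it stopped instead of starting over (see `update_group_status()` and `scan_fb_group()` in `group_functions.py` below). A minimal sketch for inspecting that progress, assuming the classic `MySQLdb` (mysql-python) driver and the default credentials from `config.py`; the repo's own `mysql_functions.py` may use a different connector:

```python
# -*- coding: utf-8 -*-
# Sketch: list how far each page/group has been scraped.
# Assumes the MySQLdb (mysql-python) driver; adjust credentials to match config.py.
import MySQLdb

conn = MySQLdb.connect(host='127.0.0.1', user='root', passwd='', db='fb_data')
cursor = conn.cursor()
cursor.execute("SELECT pageid, `next` FROM status")
for pageid, next_url in cursor.fetchall():
    print pageid, "->", next_url  # the paging URL stored by update_group_status()
cursor.close()
conn.close()
```
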
/LICENSE:
--------------------------------------------------------------------------------
1 | CC0 1.0 Universal
2 |
3 | Statement of Purpose
4 |
5 | The laws of most jurisdictions throughout the world automatically confer
6 | exclusive Copyright and Related Rights (defined below) upon the creator and
7 | subsequent owner(s) (each and all, an "owner") of an original work of
8 | authorship and/or a database (each, a "Work").
9 |
10 | Certain owners wish to permanently relinquish those rights to a Work for the
11 | purpose of contributing to a commons of creative, cultural and scientific
12 | works ("Commons") that the public can reliably and without fear of later
13 | claims of infringement build upon, modify, incorporate in other works, reuse
14 | and redistribute as freely as possible in any form whatsoever and for any
15 | purposes, including without limitation commercial purposes. These owners may
16 | contribute to the Commons to promote the ideal of a free culture and the
17 | further production of creative, cultural and scientific works, or to gain
18 | reputation or greater distribution for their Work in part through the use and
19 | efforts of others.
20 |
21 | For these and/or other purposes and motivations, and without any expectation
22 | of additional consideration or compensation, the person associating CC0 with a
23 | Work (the "Affirmer"), to the extent that he or she is an owner of Copyright
24 | and Related Rights in the Work, voluntarily elects to apply CC0 to the Work
25 | and publicly distribute the Work under its terms, with knowledge of his or her
26 | Copyright and Related Rights in the Work and the meaning and intended legal
27 | effect of CC0 on those rights.
28 |
29 | 1. Copyright and Related Rights. A Work made available under CC0 may be
30 | protected by copyright and related or neighboring rights ("Copyright and
31 | Related Rights"). Copyright and Related Rights include, but are not limited
32 | to, the following:
33 |
34 | i. the right to reproduce, adapt, distribute, perform, display, communicate,
35 | and translate a Work;
36 |
37 | ii. moral rights retained by the original author(s) and/or performer(s);
38 |
39 | iii. publicity and privacy rights pertaining to a person's image or likeness
40 | depicted in a Work;
41 |
42 | iv. rights protecting against unfair competition in regards to a Work,
43 | subject to the limitations in paragraph 4(a), below;
44 |
45 | v. rights protecting the extraction, dissemination, use and reuse of data in
46 | a Work;
47 |
48 | vi. database rights (such as those arising under Directive 96/9/EC of the
49 | European Parliament and of the Council of 11 March 1996 on the legal
50 | protection of databases, and under any national implementation thereof,
51 | including any amended or successor version of such directive); and
52 |
53 | vii. other similar, equivalent or corresponding rights throughout the world
54 | based on applicable law or treaty, and any national implementations thereof.
55 |
56 | 2. Waiver. To the greatest extent permitted by, but not in contravention of,
57 | applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and
58 | unconditionally waives, abandons, and surrenders all of Affirmer's Copyright
59 | and Related Rights and associated claims and causes of action, whether now
60 | known or unknown (including existing as well as future claims and causes of
61 | action), in the Work (i) in all territories worldwide, (ii) for the maximum
62 | duration provided by applicable law or treaty (including future time
63 | extensions), (iii) in any current or future medium and for any number of
64 | copies, and (iv) for any purpose whatsoever, including without limitation
65 | commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes
66 | the Waiver for the benefit of each member of the public at large and to the
67 | detriment of Affirmer's heirs and successors, fully intending that such Waiver
68 | shall not be subject to revocation, rescission, cancellation, termination, or
69 | any other legal or equitable action to disrupt the quiet enjoyment of the Work
70 | by the public as contemplated by Affirmer's express Statement of Purpose.
71 |
72 | 3. Public License Fallback. Should any part of the Waiver for any reason be
73 | judged legally invalid or ineffective under applicable law, then the Waiver
74 | shall be preserved to the maximum extent permitted taking into account
75 | Affirmer's express Statement of Purpose. In addition, to the extent the Waiver
76 | is so judged Affirmer hereby grants to each affected person a royalty-free,
77 | non transferable, non sublicensable, non exclusive, irrevocable and
78 | unconditional license to exercise Affirmer's Copyright and Related Rights in
79 | the Work (i) in all territories worldwide, (ii) for the maximum duration
80 | provided by applicable law or treaty (including future time extensions), (iii)
81 | in any current or future medium and for any number of copies, and (iv) for any
82 | purpose whatsoever, including without limitation commercial, advertising or
83 | promotional purposes (the "License"). The License shall be deemed effective as
84 | of the date CC0 was applied by Affirmer to the Work. Should any part of the
85 | License for any reason be judged legally invalid or ineffective under
86 | applicable law, such partial invalidity or ineffectiveness shall not
87 | invalidate the remainder of the License, and in such case Affirmer hereby
88 | affirms that he or she will not (i) exercise any of his or her remaining
89 | Copyright and Related Rights in the Work or (ii) assert any associated claims
90 | and causes of action with respect to the Work, in either case contrary to
91 | Affirmer's express Statement of Purpose.
92 |
93 | 4. Limitations and Disclaimers.
94 |
95 | a. No trademark or patent rights held by Affirmer are waived, abandoned,
96 | surrendered, licensed or otherwise affected by this document.
97 |
98 | b. Affirmer offers the Work as-is and makes no representations or warranties
99 | of any kind concerning the Work, express, implied, statutory or otherwise,
100 | including without limitation warranties of title, merchantability, fitness
101 | for a particular purpose, non infringement, or the absence of latent or
102 | other defects, accuracy, or the present or absence of errors, whether or not
103 | discoverable, all to the greatest extent permissible under applicable law.
104 |
105 | c. Affirmer disclaims responsibility for clearing rights of other persons
106 | that may apply to the Work or any use thereof, including without limitation
107 | any person's Copyright and Related Rights in the Work. Further, Affirmer
108 | disclaims responsibility for obtaining any necessary consents, permissions
109 | or other rights required for any use of the Work.
110 |
111 | d. Affirmer understands and acknowledges that Creative Commons is not a
112 | party to this document and has no duty or obligation with respect to this
113 | CC0 or use of the Work.
114 |
115 | For more information, please see
116 | <https://creativecommons.org/publicdomain/zero/1.0/>
117 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FacebookScraper
2 | This script scrapes posts and comments from Facebook pages and groups and stores them, together with their details, in a MySQL database.
3 |
4 | # Requirements
5 | This script was tested with the following requirements:
6 | * Ubuntu OS.
7 | * Python (already installed with Ubuntu).
8 | * MySQL Server.
9 |   Install it with this command:
10 |   `$ apt-get install mysql-server`
11 |
12 | # Setup Script
13 | * Download the script:
14 |   `$ git clone https://github.com/ihalloum/FacebookScraper.git`
15 | * Create the database `fb_data`:
16 |   `$ mysql -u root -p`
17 |   `mysql> CREATE DATABASE fb_data;`
18 | * Import the database structure (use `./Database/fb_data.sql`):
19 |   `$ mysql -u root -p fb_data < ./Database/fb_data.sql`
20 | * Edit the script variables in `config.py` to fit your data (see the sketch below).
21 | * Run the script :)
22 |   `$ python -W ignore script.py ScrapePage=Y ScrapeGroup=Y ScrapeComment=Y ScrapeReply=Y ScrapeLike=Y`
23 |
24 |
--------------------------------------------------------------------------------
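
Before running, fill in the empty `accesstoken` (App ID and App Secret), the MySQL credentials, and the page/group IDs in `config.py`, shown next. Below is a quick pre-flight sketch to confirm the token and a page id are accepted by the Graph API; the app id/secret and the page id are placeholders, and only the `urllib2`/`json` calls already used by the scraper are involved:

```python
# -*- coding: utf-8 -*-
# Hypothetical pre-flight check: fetch one page's metadata before scraping.
# Replace the placeholder APP_ID/APP_SECRET and page id with your own values.
import urllib2
import json

graph_url = "https://graph.facebook.com/v2.6/"
accesstoken = "123450000000000" + "|" + "0123456789abcdef0123456789abcdef"  # APP_ID|APP_SECRET
page_id = "63811549237"  # sample page id taken from config.py

web_response = urllib2.urlopen(graph_url + page_id + "?access_token=" + accesstoken)
print json.loads(web_response.read())  # prints the page's name/id if the token is valid
```
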
/config.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | #---------------------------------- Graph Var ---------------------------------------
4 | # Graph API base URL; check/update the version if needed
5 | graph_url = "https://graph.facebook.com/v2.6/"
6 | # Access token = App ID + "|" + App Secret (an app access token)
7 | accesstoken=""+"|"+""
8 |
9 | #---------------------------------- MySQL Var ---------------------------------------
10 | # MySQL Server IP
11 | host='127.0.0.1'
12 | # Database UserName
13 | username='root'
14 | # Database Password
15 | password=''
16 | # Database Name
17 | DBName='fb_data'
18 |
19 | #---------------------------------- Pages Var ---------------------------------------
20 | # Add the Pages ID
21 | #list_pages = ["95969849939","95495949393"]
22 | list_pages = ["63811549237"]
23 | # Post details (fields) to collect
24 | posts_fields = "message,likes.limit(1).summary(true),from,created_time"
25 | # Number of posts to collect from a single page, min = 25
26 | fb_post_max = 3
27 | # Enable Scrape Page
28 | scrape_page = 1
29 | #---------------------------------- Groups Var --------------------------------------
30 | # Add the Groups ID
31 | #list_groups = ["9509465696t5534","56777754433"]
32 | list_groups = ["1474176602870235"]
33 | # Feed details (fields) to collect
34 | feeds_fields = "message,likes.limit(1).summary(true),from,created_time"
35 | # Number of feeds to collect from a single group, min = 25
36 | fb_feed_max = 3
37 | # Enable Scrape Group
38 | scrape_group = 1
39 | #---------------------------------- Pages & Groups Var ------------------------------
40 | # Comment details (fields) to collect
41 | comments_fields = "message,likes.limit(1).summary(true),from,created_time"
42 | # The max number of comments to collect from a single post
43 | fb_comment_max = 3
44 | # The max number of replies to collect from a single comment
45 | fb_reply_max = 3
46 | # The max number of likes to collect from a single post, comment, or reply
47 | fb_like_max = 3
48 | # Enable Scrape Comment
49 | scrape_comment = 1
50 | # Enable Scrape Reply
51 | scrape_reply = 1
52 | # Enable Scrape like
53 | scrape_like = 1
54 |
--------------------------------------------------------------------------------
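
For reference, these settings are combined into Graph API request URLs; `group_functions.py` (below) builds feed requests as `graph_url + <group id> + "/feed/?fields=" + feeds_fields + "&access_token=" + accesstoken`. A tiny illustration that only prints the resulting URL, using placeholder credentials:

```python
# -*- coding: utf-8 -*-
# Illustration only: the feed URL built from the config values above
# (mirrors the string concatenation in group_functions.py; placeholder token).
graph_url = "https://graph.facebook.com/v2.6/"
accesstoken = "APP_ID" + "|" + "APP_SECRET"  # placeholder
feeds_fields = "message,likes.limit(1).summary(true),from,created_time"
group_id = "1474176602870235"  # sample id from list_groups

feeds_parametrs = "/feed/?fields=" + feeds_fields + "&access_token=" + accesstoken
print graph_url + group_id + feeds_parametrs
```
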
/group_functions.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import urllib2
4 | import json
5 |
6 |
7 | from config import *
8 | from mysql_functions import *
9 |
10 |
11 | fb_feed_counter = 0
12 | fb_comment_counter = 0
13 | fb_reply_counter = 0
14 | fb_like_counter = 0
15 | fb_total_feed_counter = 0
16 | fb_total_comment_counter = 0
17 | fb_total_reply_counter = 0
18 | fb_total_like_counter = 0
19 | feeds_parametrs = "/feed/?fields="+feeds_fields+"&access_token="+accesstoken
20 | comments_parametrs = "?fields="+comments_fields+"&access_token="+accesstoken
21 |
22 | # This function updates the 'next' record in the status table, which stores the group id and the last feed page scanned for that group
23 | def update_group_status(group_id,status):
24 | try:
25 | statement="INSERT INTO status (pageid,next) VALUES ('"+group_id+"', '"+status+"') ON DUPLICATE KEY UPDATE next ='"+status+"'"
26 | excuteQuery(statement)
27 | except Exception as ex:
28 | print str(ex);
29 | pass
30 |
31 | # This function stores likes for the selected feed, comment, or reply in the likes table
32 | def get_fb_like ( group_id,feed_id ,comment_id,reply_id,want_str):
33 | if (want_str=="feed"):
34 | want_id=feed_id
35 | elif (want_str=="comment"):
36 | want_id=comment_id
37 | elif (want_str == "reply"):
38 | want_id=reply_id
39 | else :
40 | print "LIKES FUNCTION want_str Error Value"
41 | return
42 |
43 | current_page = graph_url + want_id + "/likes" + "?limit=" + str(fb_like_max) +"&access_token="+accesstoken
44 | web_response = urllib2.urlopen(current_page)
45 | readable_page = web_response.read()
46 | json_fbpage = json.loads(readable_page)
47 | for data in json_fbpage["data"]:
48 | try :
49 |
50 | from_id = data["id"]
51 | from_name = data["name"]
52 | add_query=("INSERT INTO likes "
53 | "(pageid, postid,commentid,replyid,fromname,fromid) "
54 | "VALUES (%s,%s,%s,%s,%s,%s)")
55 | add_data = (group_id,feed_id,comment_id,reply_id,from_name,from_id)
56 | insert_row(add_query,add_data)
57 | global fb_like_counter
58 | fb_like_counter+=1
59 |
60 | except Exception as ex:
61 | print str(ex)
62 | pass
63 |
64 | # This function stores replies to the selected comment (comment_id) in the replys table
65 | def get_fb_Feed_reply ( feed_id , group_id ,comment_id):
66 |
67 | current_page = graph_url + comment_id + "/comments/" + comments_parametrs + "&limit=" + str(fb_reply_max)
68 | web_response = urllib2.urlopen(current_page)
69 | readable_page = web_response.read()
70 | json_fbpage = json.loads(readable_page)
71 | for data in json_fbpage["data"]:
72 | try :
73 | message = data["message"].encode('utf8')
74 | created_time = data["created_time"]
75 | reply_id = data["id"]
76 | reply_from = data["from"]
77 | from_name = reply_from["name"]
78 | from_id = reply_from["id"]
79 | likes = data["likes"]
80 | summary_likes = likes["summary"]
81 | total_likes= summary_likes["total_count"]
82 | add_query=("INSERT INTO replys "
83 | "(pageid, postid,commentid,replyid,message,fromid,fromname,createdtime,total_likes) "
84 | "VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)")
85 | add_data = (group_id,feed_id,comment_id,reply_id,message,from_id,from_name,created_time,total_likes)
86 | insert_row(add_query,add_data)
87 | global fb_reply_counter
88 | fb_reply_counter+=1
89 | if(scrape_like):
90 | get_fb_like(group_id,feed_id ,comment_id,reply_id,"reply")
91 |
92 | except Exception as ex:
93 | print str(ex)
94 | pass
95 |
96 | # This function stores comments on the selected feed (feed_id) in the comments table
97 | def get_fb_Feed_comment ( feed_id , group_id ):
98 |
99 | current_page = graph_url + feed_id + "/comments/" + comments_parametrs + "&limit=" + str(fb_comment_max)
100 | web_response = urllib2.urlopen(current_page)
101 | readable_page = web_response.read()
102 | json_fbpage = json.loads(readable_page)
103 | for data in json_fbpage["data"]:
104 | try :
105 | message = data["message"].encode('utf8')
106 | created_time = data["created_time"]
107 | comment_id = data["id"]
108 | comment_from = data["from"]
109 | from_name = comment_from["name"]
110 | from_id = comment_from["id"]
111 | likes = data["likes"]
112 | summary_likes = likes["summary"]
113 | total_likes= summary_likes["total_count"]
114 | add_query=("INSERT INTO comments "
115 | "(pageid, postid,commentid,message,fromid,fromname,createdtime,total_likes) "
116 | "VALUES (%s,%s,%s,%s,%s,%s,%s,%s)")
117 | add_data = (group_id,feed_id,comment_id,message,from_id,from_name,created_time,total_likes)
118 | insert_row(add_query,add_data)
119 | global fb_comment_counter
120 | fb_comment_counter+=1
121 | if(scrape_like):
122 | get_fb_like(group_id,feed_id ,comment_id,"NULL","comment")
123 | if(scrape_reply):
124 | get_fb_Feed_reply(feed_id,group_id,comment_id)
125 |
126 |
127 | except Exception as ex:
128 | print str(ex)
129 | pass
130 |
131 | # This function stores the feeds found at data_url and records the last feed page scanned by updating the group's 'next' record in the status table
132 | def get_fb_group_feeds( data_url ,group_id):
133 |
134 | current_page = data_url
135 | web_response = urllib2.urlopen(current_page)
136 | readable_page = web_response.read()
137 | json_fbpage = json.loads(readable_page)
138 |
139 | for data in json_fbpage["data"]:
140 | try :
141 | message = data["message"]
142 | created_time = data["created_time"]
143 | feed_id = data["id"]
144 | feed_from = data["from"]
145 | from_name = feed_from["name"]
146 | from_id = feed_from["id"]
147 | likes = data["likes"]
148 | summary_likes = likes["summary"]
149 | total_likes= summary_likes["total_count"]
150 | add_query=("INSERT INTO Posts "
151 | "(pageid, postid,message,fromid,fromname,createdtime,total_likes) "
152 | "VALUES (%s,%s,%s,%s,%s,%s,%s)")
153 | add_data = (group_id,feed_id,message,from_id,from_name,created_time,total_likes)
154 | insert_row(add_query,add_data)
155 | global fb_feed_counter
156 | fb_feed_counter+=1
157 | if(scrape_like):
158 | get_fb_like(group_id,feed_id ,"NULL","NULL","feed")
159 | if(scrape_comment):
160 | get_fb_Feed_comment(feed_id,group_id)
161 |
162 | if fb_feed_counter%10==0:
163 | print "\t"+str(fb_feed_counter) +" feeds and "+ str(fb_comment_counter)+ " comments and "+str(fb_reply_counter)+" reply and "+str(fb_like_counter)+" likes is scanned for group "+group_id
164 |
165 | except Exception as ex:
166 | #print str(ex)
167 | pass
168 |
169 | try :
170 | fb_paging = json_fbpage["paging"]
171 | next_group_page = fb_paging["next"]
172 | update_group_status(group_id,next_group_page)
173 | return next_group_page
174 | except Exception as ex:
175 | return None
176 |
177 | # This function collects feeds for the selected group by calling get_fb_group_feeds() repeatedly until fb_feed_max is reached
178 | def scan_fb_group(group_id):
179 |
180 | try:
181 | sql="select next from status where pageid='{}'"
182 | statement=sql.format(group_id)
183 | rows=excuteQuery(statement)
184 | if len(rows) == 0:
185 | current_page = graph_url + group_id + feeds_parametrs
186 | else :
187 | current_page = rows[0]['next']
188 |
189 | while (fb_feed_counter