Question

I am using scrapy for scraping news headlines and I am a rookie for scrapy and scraping as a whole. I am having huge issues for a few days now pipelining my scraped data into my SQL db. I have 2 classes in my pipelines.py file one for inserting items to Database and another for backing up scraped data into json file for front end web development reasons.

This is the code for my spider - its extracting news headlines from the start_urls - it picks up this data as strings using extract() and later on looping through all of them and using strip() to remove white spaces for better formatting

from scrapy.spider import Spider
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from scrapy.item import Item
from Aljazeera.items import AljazeeraItem
from datetime import date, datetime


class AljazeeraSpider(Spider):
    name = "aljazeera"
    allowed_domains = ["aljazeera.com"]
    start_urls = [
        "http://www.aljazeera.com/news/europe/",
        "http://www.aljazeera.com/news/middleeast/",
        "http://www.aljazeera.com/news/asia/",
        "http://www.aljazeera.com/news/asia-pacific/",
        "http://www.aljazeera.com/news/americas/",
        "http://www.aljazeera.com/news/africa/",
        "http://blogs.aljazeera.com/"

    ]

    def parse(self, response):
        sel = Selector(response)
        sites = sel.xpath('//td[@valign="bottom"]')
        contents = sel.xpath('//div[@class="indexSummaryText"]')
        items = []

        for site,content in zip(sites, contents):
            item = AljazeeraItem()
            item['headline'] = site.xpath('div[3]/text()').extract()
            item['content'] = site.xpath('div/a/text()').extract()
            item['date'] = str(date.today())
            for headline, content in zip(item['content'], item['headline']):
              item['headline'] = headline.strip()
              item['content'] = content.strip()
              items.append(item)
        return items

The Code for my pipeline.py is as follows :

import sys
import MySQLdb
import hashlib
from scrapy.exceptions import DropItem
from scrapy.http import Request
import json
import os.path

class SQLStore(object):
  def __init__(self):
    self.conn = MySQLdb.connect(user='root', passwd='', db='aj_db', host='localhost', charset="utf8", use_unicode=True)
    self.cursor = self.conn.cursor()
    #log data to json file


def process_item(self, item, spider): 

    try:
        self.cursor.execute("""INSERT INTO scraped_data(headlines, contents, dates) VALUES (%s, %s, %s)""", (item['headline'].encode('utf-8'), item['content'].encode('utf-8'), item['date'].encode('utf-8')))
        self.conn.commit()

    except MySQLdb.Error, e:
        print "Error %d: %s" % (e.args[0], e.args[1])

        return item



#log runs into back file 
class JsonWriterPipeline(object):

    def __init__(self):
        self.file = open('backDataOfScrapes.json', "w")

    def process_item(self, item, spider):
        line = json.dumps(dict(item)) + "\n"
        self.file.write("item === " + line)
        return item

And the settings.py is as follows :

BOT_NAME = 'Aljazeera'

SPIDER_MODULES = ['Aljazeera.spiders']
NEWSPIDER_MODULE = 'Aljazeera.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'Aljazeera (+http://www.yourdomain.com)'

ITEM_PIPELINES = {
    'Aljazeera.pipelines.JsonWriterPipeline': 300,
    'Aljazeera.pipelines.SQLStore': 300,
}

My sql setting are all ok. and after running scrapy crawl aljazeera it works and even outputs the items in json format as follows :

item === {"headline": "Turkey court says Twitter ban violates rights", "content": "Although ruling by Turkey's highest court is binding, it is unclear whether the government will overturn the ban.", "date": "2014-04-02"}

i really dont know or cant see what I am missing here. I would really appreciate if u guys could help me out.

Thanks for your time,

Was it helpful?

Solution

Your indentation is wrong in the SQLStore pipeline. I've tested with correct indentation and its working fine. Copy the below and it should be perfect.

class SQLStore(object):
def __init__(self):
    self.conn = MySQLdb.connect(user='root', passwd='', db='aj_db', host='localhost', charset="utf8", use_unicode=True)
    self.cursor = self.conn.cursor()
    #log data to json file


def process_item(self, item, spider): 

    try:
        self.cursor.execute("""INSERT INTO scraped_data(headlines, contents, dates) VALUES (%s, %s, %s)""", (item['headline'].encode('utf-8'), item['content'].encode('utf-8'), item['date'].encode('utf-8')))
        self.conn.commit()

    except MySQLdb.Error, e:
        print "Error %d: %s" % (e.args[0], e.args[1])

        return item
Licensed under: CC-BY-SA with attribution
Not affiliated with StackOverflow
scroll top