These changes are important: they save the important data of each RSS feed into a vector (a Python list).

This commit is contained in:
Eddie 2017-10-25 11:49:09 -05:00
parent 824624df3a
commit 419eab8f63
2 changed files with 21 additions and 10 deletions


@@ -1,18 +1,30 @@
import yaml
import feedparser as fp
rawDat = open('rss_univ.txt', 'r')
strDat = rawDat.read()
rawDat = strDat.split(';\n')
index = len(rawDat) - 1
rawDat.pop(index)
strDat = yaml.load(rawDat[0])
strDat = []
for i in rawDat:
    strDat.append(yaml.load(i))
del rawDat
impDat = []
for d in strDat:
    impDat.append([d['entries'][0]['title'], d['entries'][0]['links'][0]['href'], d['entries'][0]['summary']])
del strDat
# this section of the code shows how to extract relevant data from the dictionaries
print(len(rawDat))
print(strDat['entries'][0]['title'])
print(strDat['entries'][0]['links'][0]['href'])
print(strDat['entries'][0]['summary'])
"""
print(dic['entries'][0]['title'])
print(dic['entries'][0]['links'][0]['href'])
print(dic['entries'][0]['summary'])
"""


@@ -6,12 +6,12 @@ def get_data_rss():
    datUniver = fp.parse('http://www.eluniversal.com.mx/seccion/1/rss.xml')
    datJorn = fp.parse('http://www.jornada.unam.mx/rss/politica.xml?v=1')
    datCnn = fp.parse('http://expansion.mx/rss/politica')
    datAri = fp.parse('http://aristeguinoticias.com/category/mexico/feed/')
    file = open('rss_univ.txt', 'a')
    # file.write(str(datCnn.headers['Date']) + ';\n')
    file.write(str(datCnn) + ';\n')
    # file.write(str(datAri.headers['Date']) + ';\n')
    file.write(str(datAri) + ';\n')
    # file.write(str(datUniver.headers['Date']) + ';\n')
    file.write(str(datUniver) + ';\n')
    # file.write(str(datJorn.headers['Date']) + ';\n')
@@ -19,7 +19,6 @@ def get_data_rss():
    file.close()
# SOME COMMANDS OF FEEDPARSER
# print(datUniver['feed']['link'] + '\n')
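
As a companion sketch, the writing side shown in this second file can be condensed into a loop. This is a hypothetical restructuring based only on the visible hunk (the actual file writes each feed explicitly and may contain lines not shown here); the URLs and the ';\n' record separator are taken from the diff.

import feedparser as fp

def get_data_rss():
    # Feed URLs taken from the diff above.
    urls = [
        'http://www.eluniversal.com.mx/seccion/1/rss.xml',
        'http://www.jornada.unam.mx/rss/politica.xml?v=1',
        'http://expansion.mx/rss/politica',
        'http://aristeguinoticias.com/category/mexico/feed/',
    ]
    with open('rss_univ.txt', 'a') as f:
        for url in urls:
            feed = fp.parse(url)
            f.write(str(feed) + ';\n')  # ';\n' is the record separator the reader splits on

get_data_rss()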