import pygame

pygame.init()
win = pygame.display.set_mode((500, 500))
pygame.display.set_caption("Cubes Game")

x = 50
y = 50
width = 40
height = 60
speed = 5

run = True
while run:
    pygame.time.delay(100)

    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            run = False

    # move the cube with the arrow keys
    keys = pygame.key.get_pressed()
    if keys[pygame.K_LEFT]:
        x -= speed
    if keys[pygame.K_RIGHT]:
        x += speed
    if keys[pygame.K_UP]:
        y -= speed
    if keys[pygame.K_DOWN]:
        y += speed

    win.fill((0, 0, 0))  # clear the previous frame so the cube does not leave a trail
    pygame.draw.rect(win, (0, 0, 255), (x, y, width, height))
    pygame.display.update()  # without this call nothing is drawn in the window

pygame.quit()
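An optional follow-up, not part of the original snippet: if the cube should stay inside the 500x500 window, the coordinates can be clamped right after the key handling. A minimal sketch assuming the window and width/height values above (the clamp helper is made up for illustration):

def clamp(value, low, high):
    return max(low, min(value, high))

x = clamp(x, 0, 500 - width)   # keep the cube inside the window horizontally
y = clamp(y, 0, 500 - height)  # and vertically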
import requests
from bs4 import BeautifulSoup
import csv
from datetime import datetime
from multiprocessing import Pool


def get_html(url):
    r = requests.get(url)
    return r.text  # returns the HTML code of the page


def get_total_pages(html):
    soup = BeautifulSoup(html, 'lxml')
    pages = soup.find('ul', class_='liga_menu').find_all('li')
    links = []
    for li in pages:
        a = li.find('a').get('href')  # the match link, a string
        link = 'https://1xstavka.ru/' + a
        links.append(link)  # append to the end of the list
    return links


def get_page_data(html):
    soup = BeautifulSoup(html, 'lxml')
    try:
        name = soup.find('span', class_="c-events__teams").text.strip()
    except:
        name = ''
    try:
        schet = soup.find('div', class_='c-events__score').text.strip()
    except:
        schet = ''
    data = {'Матч': name, 'Счет': schet}  # 'Матч' = match, 'Счет' = score
    return data


def write_csv(data):
    with open('1xstavka.csv', 'a') as parser:  # the file everything is saved to
        writer = csv.writer(parser)
        writer.writerow((data['Матч'], data['Счет']))
        print(data['Матч'], 'parsed\r')


def make_all(url):
    html = get_html(url)
    data = get_page_data(html)
    write_csv(data)


def main():
    url = 'https://1xstavka.ru/live/Football/'
    all_links = get_total_pages(get_html(url))
    # for index, url in enumerate(all_links):  # index is just a counter
    #     html = get_html(url)
    #     data = get_page_data(html)
    #     write_csv(data)
    #     print(index)
    # map(function, list_)
    with Pool(15) as p:
        p.map(make_all, all_links)


if __name__ == '__main__':
    main()
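One caveat with the version above: every worker process appends to 1xstavka.csv on its own, so rows written from different processes can interleave. A minimal alternative sketch (same functions and imports as above, only the CSV writing is moved into main) that collects the parsed dicts from Pool.map and writes them from a single place:

def make_all(url):
    # parse only; return the data instead of writing it from the worker
    return get_page_data(get_html(url))


def main():
    all_links = get_total_pages(get_html('https://1xstavka.ru/live/Football/'))
    with Pool(15) as p:
        results = p.map(make_all, all_links)
    # single writer: no interleaved rows; newline='' avoids blank lines on Windows
    with open('1xstavka.csv', 'a', newline='') as f:
        writer = csv.writer(f)
        for data in results:
            writer.writerow((data['Матч'], data['Счет']))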
import logging

logging.basicConfig(format='%(levelname)-8s %(message)s', level=logging.DEBUG)
logging.info("""vm.overcommit_ratio is too small, you will not be able to use more than $overcommit_ratio*RAM+SWAP for applications""")
2018-06-06 11:52:15 MY-HOSTNAME root[16065] INFO vm.overcommit_ratio is too small, you will not be able to use more than $overcommit_ratio*RAM+SWAP for applications
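Note that the logged line above carries a timestamp, hostname, logger name and PID, which the bare '%(levelname)-8s %(message)s' format would not produce. A sketch only, one way to get output shaped like that line; the handler actually used for it is an assumption here:

import logging
import socket

logging.basicConfig(
    format='%(asctime)s ' + socket.gethostname() + ' %(name)s[%(process)d] %(levelname)s %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.DEBUG,
)
logging.info('vm.overcommit_ratio is too small, ...')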
CELERYD_HIJACK_ROOT_LOGGER = False

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'colored': {
            '()': DjangoColorsFormatter,
            'format': '[%(asctime)s] - %(levelname)s - %(message)s \n',
            'datefmt': '%d/%b/%Y %H:%M:%S',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'colored',
        },
    },
    'loggers': {
        '': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'django.request': {
            'level': 'DEBUG',
            'handlers': ['console'],
            'propagate': False,
        },
    },
}
from celery.utils.log import get_task_logger

logger = get_task_logger(__name__)


@app.task()
def my_new_task():
    raise Exception('spam-eggs')
def my_new_task():
    try:
        ...
    except:
        import traceback
        logger.info(traceback.format_exc())
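Putting the two snippets together, a sketch of how the try/except variant could look as the actual task, so the traceback reaches the console handler configured in LOGGING above (the raise is only a stand-in for the real task body):

@app.task()
def my_new_task():
    try:
        raise Exception('spam-eggs')  # stand-in for the real task body
    except Exception:
        # logger.exception() logs at ERROR level and appends the traceback itself,
        # equivalent to logger.info(traceback.format_exc()) but shorter
        logger.exception('my_new_task failed')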
import requests
from bs4 import BeautifulSoup


def get_html(url):
    r = requests.get(url)
    return r.text


def get_total_matches(html):
    soup = BeautifulSoup(html, 'lxml')
    matches = soup.find('ul', class_='matches')
    print(matches)


def main():
    url = 'https://olimp.bet/app/live/1'
    html = get_html(url)
    get_total_matches(html)


if __name__ == '__main__':
    main()
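find() returns None when the ul.matches block is not present in the downloaded HTML (for example, if the page builds the match list with JavaScript), so the print above would just show None. A defensive sketch of get_total_matches; the <li> markup inside the list is an assumption:

def get_total_matches(html):
    soup = BeautifulSoup(html, 'lxml')
    matches = soup.find('ul', class_='matches')
    if matches is None:
        # the list is probably rendered client-side and missing from the raw HTML
        return []
    # assumption: each match sits in its own <li> inside the list
    return [li.get_text(strip=True) for li in matches.find_all('li')]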
<div id="root"></div> <div id="modal-root"></div>