0.0.1 init, main page prepare
This commit is contained in:
292
BaseModels/OpenStreetMap/osm_api.py
Normal file
292
BaseModels/OpenStreetMap/osm_api.py
Normal file
@@ -0,0 +1,292 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import time
|
||||
|
||||
import overpass
|
||||
import copy
|
||||
from geopy.geocoders import Nominatim
|
||||
|
||||
Nominatim_last_request = None
|
||||
|
||||
def osm_api_request(data, res_type):
    """Run an Overpass API query and normalise the response.

    Args:
        data: Overpass QL query body.
        res_type: Overpass responseformat string; when it starts with 'csv'
            the tabular result is converted to a list of dicts keyed by the
            header row, de-duplicated on the first column, and rows without
            any usable 'name:en' are dropped.

    Returns:
        list of dicts (csv mode) or a deep copy of the raw response.
    """
    # Crude client-side rate limiting so we do not hammer the public
    # Overpass endpoint.
    time.sleep(1)

    api = overpass.API(timeout=100)
    res = api.get(data, responseformat=res_type)

    res_list = []
    if res_type[:3].lower() == 'csv':
        header = res[0]
        # Dedup on the first csv column (set gives O(1) membership tests
        # instead of the original list scan).
        seen_first_col = set()
        for line in res[1:]:
            if line[0] in seen_first_col:
                continue

            # Idiomatic replacement for the original index-walking while loop.
            line_Dict = dict(zip(header, line))

            if 'name:en' in line_Dict and 'name' in line_Dict:
                # Fall back to the local name when no English name is present.
                if not line_Dict["name:en"] and line_Dict["name"]:
                    line_Dict["name:en"] = line_Dict["name"]

                # Rows that still have no English name are useless downstream.
                if not line_Dict["name:en"]:
                    continue

            res_list.append(line_Dict)
            seen_first_col.add(line[0])
    else:
        res_list = copy.deepcopy(res)

    return res_list
|
||||
|
||||
|
||||
def osm_get_area_id_by_params_dict(paramsDict):
    """Resolve a Nominatim query dict (e.g. {'country': ..., 'city': ...})
    to an Overpass area id, or None when nothing suitable is found.

    The Overpass area id of a relation is its OSM id + 3600000000.
    """
    area_id = None

    try:
        # Geocoding request via Nominatim, rate-limited to ~1 req/s.
        time.sleep(1)
        locator = Nominatim(user_agent='TWB')
        matches = locator.geocode(paramsDict, exactly_one=False, limit=3)

        if not matches:
            return area_id

        # Pick the first result that is an OSM relation; only relations can
        # be turned into Overpass areas this way.
        relation = None
        for candidate in matches:
            print(candidate.address, candidate.raw.get("osm_type"))
            if candidate.raw.get("osm_type") == "relation":
                relation = candidate
                break

        if not relation:
            return area_id

        area_id = int(relation.raw.get("osm_id")) + 3600000000

    except Exception as e:
        print(f'osm_get_area_id_by_params_dict Error = {e}')

    return area_id
|
||||
|
||||
|
||||
def osm_get_countries():
    """Fetch every country (admin_level=2 relation with a 3-letter
    ISO3166-1:alpha3 code) from Overpass, sorted by local name."""
    res_type = 'csv("name", "name:en", "name:ru", "ISO3166-1", "flag", "int_name", "official_name", "ISO3166-1:alpha3", ' \
               '"ISO3166-1:numeric", ::lon, ::lat)'
    data = 'relation["admin_level"="2"]["ISO3166-1:alpha3"~"^...$"]; ' \
           'out center;'

    countries = osm_api_request(data, res_type)

    from operator import itemgetter
    countries.sort(key=itemgetter('name'))

    return countries
|
||||
|
||||
|
||||
def osm_get_cities_by_country(country_Dict):
    """Fetch all city/town place nodes inside a country's Overpass area.

    Requires country_Dict['area_id']; returns a list of city dicts
    (empty on missing area id or on any request error).
    """
    cities = []

    try:
        # Without a resolved area id there is nothing to query.
        area_id = country_Dict.get('area_id')
        if not area_id:
            return []

        res_type = 'csv("name", "name:ru", "name:en", ::lon, ::lat)'
        data = f'area({area_id})->.searchArea;' \
               '(node[place~"city$|town$"](area.searchArea););' \
               'out center;'

        cities = osm_api_request(data, res_type)

    except Exception as e:
        print(f'osm_get_cities_by_country Error = {e}')

    return cities
|
||||
|
||||
|
||||
|
||||
def osm_get_airports(area_id, city_find_str=None):
    """Fetch airports inside an Overpass area and (optionally) group them by
    the nearest city.

    Args:
        area_id: Overpass area id to search within.
        city_find_str: Overpass tag filter for "city" objects (e.g.
            'place~"city$|town$"').  When given, the query interleaves each
            airport row with the place rows found within 20 km of it, and the
            result is grouped into {city name:en: [airport dicts]}.  When
            None, the raw list of airport rows is returned instead.

    Returns:
        dict mapping city name to airports (plus key None for airports that
        could not be linked to a city), a raw list when city_find_str is
        None, or {'error': 'timeout'} on an Overpass timeout.
    """
    airports_cities_Dict = {}

    try:

        if not area_id:
            return airports_cities_Dict

        res_type = 'csv("name", "name:ru", "name:en", "iata", "icao", "place", "int_name", "addr:country", "city_served", ::lon, ::lat)'
        # Real, operating civil aerodromes only: must carry 3-letter IATA and
        # 4-letter ICAO codes and not be abandoned/military/private/closed.
        data = f'area({area_id})->.searchArea;' \
               f'nwr["aeroway"="aerodrome"]["iata"~"^...$"]["icao"~"^....$"]["abandoned"!~".*"]["landuse"!="military"]["was:landuse"!="military"]["aerodrome:type"!~"airfield|military|private"]["amenity"!="flight_school"]["closed"!="yes"](area.searchArea)->.airports;'

        if city_find_str:
            # For every airport also emit the matching places within 20 km;
            # each airport row is followed by its candidate city rows.
            data = f'{data}' \
                   f'foreach.airports->.elem(nwr(around.elem:20000)[{city_find_str}]->.city; .elem out center; .city out center;);'
        else:
            data = f'{data} .airports out center;'

        res = osm_api_request(data, res_type)

        if not city_find_str:
            # No grouping requested: return the plain airport rows.
            return res

        present_IATA_list = []

        i = 0
        while i < len(res):
            # Rows are deleted in place, so `i` only advances when a row
            # is kept as-is.

            if not res[i]['iata'] or not res[i]['icao']:
                del res[i]
                continue

            # Drop duplicate airports (same IATA code seen before).
            if res[i]['iata'] and res[i]['iata'] in present_IATA_list:
                del res[i]
                continue
            present_IATA_list.append(res[i]['iata'])

            # if this row is an airport
            if res[i]['iata']:
                if i + 1 < len(res) and res[i+1]['place']:

                    # Scan the following city rows for one whose name is
                    # contained in the airport's name (any language).
                    i2 = i + 1
                    linked = None
                    while i2 < len(res) and res[i2]['place']:
                        if not linked:
                            if (res[i2]['name'] and res[i]['name'] and res[i2]['name'] in res[i]['name']) or \
                                    (res[i2]['name:en'] and res[i]['name:en'] and res[i2]['name:en'] in res[i]['name:en']) or \
                                    (res[i2]['name:ru'] and res[i]['name:ru'] and res[i2]['name:ru'] in res[i]['name:ru']):
                                linked = i2

                        i2 += 1

                    if not linked:
                        # No name match: fall back to the first (nearest) row.
                        linked = i + 1

                    res[i]['city'] = copy.deepcopy(res[linked])
                    # NOTE(review): membership is tested on 'name' but the
                    # dict key used is 'name:en' — looks inconsistent; confirm.
                    if not res[i]['city']['name'] in airports_cities_Dict:
                        airports_cities_Dict.update({res[i]['city']['name:en']: [res[i]]})
                    else:
                        airports_cities_Dict[res[i]['city']['name:en']].append(res[i])

                    # Drop the airport row together with all its city rows.
                    while i < i2:
                        del res[i]
                        i2 -= 1

                    continue

                # No adjacent city rows: fall back to the city_served tag.
                elif res[i]['city_served']:

                    res[i]['city'] = {'name:en': res[i]['city_served']}
                    if not res[i]['city']['name:en'] in airports_cities_Dict:
                        airports_cities_Dict.update({res[i]['city']['name:en']: [res[i]]})
                    else:
                        airports_cities_Dict[res[i]['city']['name:en']].append(res[i])
                    del res[i]
                    continue

            # current row is not an airport - delete it
            else:
                del res[i]
                continue

            i += 1

        if res:
            # Airports that could not be linked to any city.
            airports_cities_Dict[None] = res

    except Exception as e:
        print(f'osm_get_airports Error = {e}')
        # Overpass timeout is signalled with error code 25.
        # NOTE(review): e.args may be empty or non-numeric here — confirm.
        if e.args[0] == 25:
            return {'error': 'timeout'}

    return airports_cities_Dict
|
||||
|
||||
|
||||
|
||||
def osm_get_country_w_cities_n_airports(country_Dict, area_id):
    """Populate one country dict with its cities and their airports.

    Args:
        country_Dict: country row from osm_get_countries(); mutated in place.
        area_id: pre-resolved Overpass area id, or falsy to resolve it via
            Nominatim from the country name.

    Returns:
        (country_Dict, airports_wo_city) — the second element is currently
        always [] (the collection of unlinked airports is commented out).
    """
    print(f'{country_Dict["name:en"]}')

    if area_id:
        country_Dict['area_id'] = area_id
    else:
        country_Dict['area_id'] = osm_get_area_id_by_params_dict({'country': country_Dict['name']})

    # Country-wide airport query, grouped by nearby city/town; on Overpass
    # timeout retry with the cheaper city-only filter.
    airports_Dict = osm_get_airports(country_Dict['area_id'], 'place~"city$|town$"')
    if airports_Dict and 'error' in airports_Dict and airports_Dict['error'] == 'timeout':
        airports_Dict = osm_get_airports(country_Dict['area_id'], 'place="city"')

    from ReferenceDataApp.funcs import get_countries_key_data, get_cities_by_country_name_en
    db_cities = get_cities_by_country_name_en(country_Dict["name:en"])

    cities = osm_get_cities_by_country(country_Dict)
    for city in cities:
        try:
            print(f' > {city["name:en"]}')
        except Exception as e:
            print(str(e))

        if airports_Dict and 'error' in airports_Dict and airports_Dict['error'] == 'timeout':

            # Country-wide lookup timed out twice: query airports per city,
            # reusing area ids already stored in the DB when possible.
            if city['name:en'] in db_cities.keys():
                city['area_id'] = db_cities[city['name:en']]
            else:
                city['area_id'] = osm_get_area_id_by_params_dict(
                    {'country': country_Dict['name:en'], 'city': city['name:en']})

            airports_list = osm_get_airports(city['area_id'])
            if airports_list:
                city['airports'] = copy.deepcopy(airports_list)
        else:
            # Take (and consume) this city's group from the country-wide result.
            if city['name:en'] in airports_Dict.keys():
                city['airports'] = copy.deepcopy(airports_Dict[city['name:en']])
                del airports_Dict[city['name:en']]

        if not 'airports' in city:
            city['airports'] = []

        print(f' > > airports count={str(len(city["airports"]))}')

        city['parsing_status'] = 'finished'

    country_Dict['cities'] = cities
    country_Dict['parsing_status'] = 'finished'

    airports_wo_city = []
    # if airports_Dict and None in airports_Dict:
    #     airports_wo_city = airports_Dict[None]

    return country_Dict, airports_wo_city
|
||||
|
||||
|
||||
def osm_get_countries_n_cities_n_airports():
    """Fetch all countries and enrich each with its cities and airports.

    Returns the mutated list of country dicts.  Airports that could not be
    linked to a city are collected but currently not returned (see
    osm_get_country_w_cities_n_airports).
    """
    airports_wo_city = []

    from ReferenceDataApp.funcs import get_countries_key_data
    # Known countries' area ids from the DB, keyed by English name, so we
    # can skip the Nominatim round-trip.
    db_countries = get_countries_key_data()

    countries = osm_get_countries()
    # BUG FIX: the original `while i < len(countries)` loop never incremented
    # `i`, looping forever on the first country.  enumerate() cannot miss the
    # increment.
    for i, country in enumerate(countries):
        area_id = db_countries.get(country['name:en'])
        countries[i], airports_wo_city_for_country = osm_get_country_w_cities_n_airports(country, area_id)
        if airports_wo_city_for_country:
            airports_wo_city.extend(airports_wo_city_for_country)

    return countries
|
||||
68
BaseModels/SMS_sender.py
Normal file
68
BaseModels/SMS_sender.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
__author__ = 'SDE'
|
||||
|
||||
import urllib3
|
||||
import json
|
||||
|
||||
def send_SMS(phone, text, urgent=False, staff=False):
    """Send an SMS through the cp.websms.by HTTP GET API.

    Args:
        phone: recipient phone; unless staff=True it is cleaned to the first
            7-12 digit run found in the string.
        text: message body; spaces become '+' as the API expects.
        urgent: pass the API's urgent flag.
        staff: skip phone normalisation (number assumed already valid).

    Returns:
        the decoded API response dict, or the string 'phone DoesNotExist'
        when no usable number was found.
    """
    import re
    from BaseModels.mailSender import techSendMail
    print('send_SMS')

    # The API expects '+' as the space separator in GET requests.
    text = text.replace(' ', '+')

    if not staff:
        phone = phone.replace(' ', '')
        # Raw string: '\d' in a plain literal is a DeprecationWarning.
        phone_list = re.findall(r'\d{7,12}', phone)

        if not phone_list:
            return u'phone DoesNotExist'

        phone = phone_list[0]

    # SECURITY: account credentials are hard-coded here — move them into
    # settings / environment configuration.
    http_request = 'http://cp.websms.by/?r=api/msg_send' \
                   '&user=administrator@baldenini.by' \
                   '&apikey=zTwevODOYl' \
                   '&sender=Baldenini'
    # '&test=1'

    if urgent:
        http_request = http_request + '&urgent=1'
    http_request = http_request + '&recipients=' + phone

    # BUG FIX: the original encoded http_request to bytes and then
    # concatenated the str '&message=' onto it, raising TypeError on
    # Python 3.  Keep the URL as str throughout; urllib3 encodes it.
    http_request = http_request + '&message=' + text

    http = urllib3.PoolManager()

    r = http.request('GET', http_request)

    r_status = json.loads(r.data)

    if r_status['status'] == 'error':
        message = r_status['message']
        try:
            # Best-effort: alert tech staff by mail, never fail the caller.
            message = http_request + u'<br>' + message
            techSendMail(message)
        except Exception:
            pass
    else:
        message = None

    stat = {
        'status': r_status,
        'message': message,
    }

    print('sms_status', phone, stat)

    return r_status
|
||||
1
BaseModels/__init__.py
Normal file
1
BaseModels/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__author__ = 'SDE'
|
||||
303
BaseModels/admin_utils.py
Normal file
303
BaseModels/admin_utils.py
Normal file
@@ -0,0 +1,303 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
__author__ = 'SDE'
|
||||
|
||||
from django.contrib.admin.widgets import AdminFileWidget, AdminTextareaWidget
|
||||
from django.forms import widgets
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.db import models
|
||||
from django.contrib import admin
|
||||
from django.contrib.contenttypes.admin import GenericTabularInline, GenericStackedInline
|
||||
|
||||
import re
|
||||
# from modeltranslation.admin import TranslationAdmin
|
||||
|
||||
# from filebrowser.admin import
|
||||
|
||||
|
||||
def get_base_fieldsets():
    """Return the fieldset layout shared by the base admin change pages.

    The first two sections (main fields and flags) are always expanded;
    the remaining ones are collapsible.
    """
    main_section = [None, {
        'classes': ['wide'],
        'fields': (
            ('name',),
            ('name_plural',),
            'url',
            'FAQ_title'
        )
    }]

    flags_section = [None, {
        'classes': ['wide'],
        'fields': (
            ('enable', 'order'),
            ('open_left_curtain_when_render', 'open_right_curtain_when_render')
        )
    }]

    def _collapsed(title, fields):
        # Helper: a wide, collapsible fieldset section.
        return (title, {'classes': ['wide', 'collapse'], 'fields': fields})

    return [
        main_section,
        flags_section,
        _collapsed(u'Описание и текст', ('description', 'text',)),
        _collapsed(u'Промо ФОН', (
            'background_promo_show', 'background_promo_inherits',
            'background_image',
            'background_title', 'background_txt', 'background_dates_txt',
            'background_subred_txt', 'background_promo_url', 'background_txt_color'
        )),
        _collapsed(u'SEO', (
            'seo_title',
            'seo_description',
            'seo_keywords',
            'seo_text'
        )),
        _collapsed(u'Партнерские ссылки', (
            'link_left_promo_show',
            'link_left_promo_logo',
            'link_left_promo_url',
            'link_left_promo_name',
            'link_left_promo_text',

            'link_right_promo_show',
            'link_right_promo_logo',
            'link_right_promo_url',
            'link_right_promo_name',
            'link_right_promo_text',
        )),
    ]
|
||||
|
||||
|
||||
class AdminImageWidget(AdminFileWidget):
    """Admin file widget that prefixes the file input with a small preview <img>."""

    def render(self, name, value, attrs=None, renderer=None):
        output = []
        # Only values that actually expose a .url (a stored file) get a thumbnail.
        if value and getattr(value, "url", None):
            output.append(u'<img src="{url}" style="max-width: 150px; max-height: 150px; width: auto; height: auto; margin: 10px 10px 10px 10px;"/> '.format(url=value.url))
        # NOTE(review): super(AdminFileWidget, self) deliberately skips
        # AdminFileWidget.render and renders the plain file input instead —
        # looks intentional, but confirm.
        output.append(super(AdminFileWidget, self).render(name, value, attrs))

        return mark_safe(u''.join(output))
|
||||
|
||||
|
||||
def init_formfield_for_dbfield(class_model, self, db_field, request, **kwargs):
    """Shared formfield_for_dbfield hook: widens the admin widgets of
    well-known text fields.

    Args:
        class_model: the admin class whose super() implementation to call.
        self: the admin instance.
        db_field, request, kwargs: forwarded to Django's implementation.

    Returns:
        the (possibly re-widgeted) form field, or None if Django returned None.
    """
    formfield = super(class_model, self).formfield_for_dbfield(db_field, request, **kwargs)
    # Single-line text fields stretched to 80% width.
    if db_field.name == 'url' or db_field.name == 'name' or db_field.name == 'title' or db_field.name == 'name_plural':
        formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 80%'})
    if db_field.name == 'workListForServicePage':
        formfield.widget = admin.widgets.AdminTextInputWidget( attrs={'style': 'width: 80%'})
    if db_field.name == 'seo_title':
        formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 80%'})
    # Multi-line SEO fields get a plain textarea.
    if db_field.name == 'seo_description' or db_field.name == 'seo_keywords':
        formfield.widget = admin.widgets.AdminTextareaWidget()
    if db_field.name in ('background_title', 'background_dates_txt', 'background_subred_txt', 'background_promo_url'):
        formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 80%'})
    if db_field.name in ['fixed_address']:
        formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 80%'})
    if db_field.name in ('background_txt', 'description'):
        formfield.widget = admin.widgets.AdminTextareaWidget(attrs={'style': 'width: 80%'})
    if db_field.name in ('lexems',):
        formfield.widget = admin.widgets.AdminTextareaWidget(attrs={'style': 'width: 80%'})
    # if db_field.name == 'answer':
    #     formfield.widget = admin.widgets.AdminTextareaWidget(attrs={'style': 'width: 80%'})
    if db_field.name in ['question', 'FAQ_title']:
        formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 80%'})

    # if db_field.name in ['text']:
    #     formfield.widget.attrs.update({'style': 'width: 80%'})

    if formfield and formfield.widget:
        # if type(formfield.widget) in (admin.widgets.AdminSplitDateTime, ):
        #     formfield.widget.attrs.update({'style': 'width: 400px'})

        # Any remaining textarea widget is also widened.
        if type(formfield.widget) in (admin.widgets.AdminTextareaWidget, ):
            formfield.widget.attrs.update({'style': 'width: 80%'})
            pass

        # 'url' is handled last so this fixed width overrides the 80% rule above.
        if db_field.name in ['url']:
            formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 500px'})

    return formfield
|
||||
|
||||
|
||||
def get_image_thumb(self, obj):
    """Return a small <img> HTML snippet for the first image-like attribute
    found on obj, or the string '(none)' when no image can be located.

    Probes, in order: image, images_gallery, picture, icon (only if the url
    contains a dot), main_photo(), offer.main_photo(), rel_product.main_photo(),
    logo, photo, picture (again, as in the original lookup order).
    """

    def _probe(getter):
        # Any failure (missing attribute, no file, broken relation) simply
        # means "no image here" — mirror the original blanket except blocks.
        try:
            return getter()
        except:
            return None

    candidates = (
        lambda: obj.image.url if obj.image else None,
        lambda: obj.images_gallery.first().url if obj.images_gallery else None,
        lambda: obj.picture.url,
        lambda: obj.icon.url if '.' in obj.icon.url else None,
        lambda: obj.main_photo().url,
        lambda: obj.offer.main_photo().url,
        lambda: obj.rel_product.main_photo().url,
        lambda: obj.logo.url,
        lambda: obj.photo.url,
        lambda: obj.picture.url,
    )

    image_url = None
    for candidate in candidates:
        image_url = _probe(candidate)
        if image_url:
            break

    if image_url:
        return mark_safe('<img style="max-height: 100px; max-width: 100px;" src="' + image_url + '" />')  # width="60"
    else:
        return '(none)'
|
||||
|
||||
|
||||
class Admin_GenericBaseIconStackedInline(GenericStackedInline):
    """Generic-relation stacked inline with the shared widget setup and an
    image thumbnail list column."""

    def formfield_for_dbfield (self, db_field, **kwargs):
        # `request` reaches the helper through **kwargs (Django passes it as
        # a keyword argument).
        return init_formfield_for_dbfield(Admin_GenericBaseIconStackedInline, self, db_field, **kwargs)

    def image_thumb(self, obj):
        # Delegates to the module-level helper that probes obj for an image.
        return get_image_thumb(self, obj)

    image_thumb.short_description = u'Миниатюра'
    image_thumb.allow_tags = True
|
||||
|
||||
|
||||
|
||||
class Admin_BaseIconStackedInline(admin.StackedInline):
    """Stacked inline with the shared widget setup and an image thumbnail
    list column."""

    def formfield_for_dbfield (self, db_field, **kwargs):
        # `request` reaches the helper through **kwargs (Django passes it as
        # a keyword argument).
        return init_formfield_for_dbfield(Admin_BaseIconStackedInline, self, db_field, **kwargs)

    def image_thumb(self, obj):
        # Delegates to the module-level helper that probes obj for an image.
        return get_image_thumb(self, obj)

    image_thumb.short_description = u'Миниатюра'
    image_thumb.allow_tags = True
|
||||
|
||||
|
||||
class Admin_BaseIconTabularModel(admin.TabularInline):
    """Tabular inline with the shared widget setup and an image thumbnail
    list column."""

    def formfield_for_dbfield (self, db_field, **kwargs):
        # `request` reaches the helper through **kwargs (Django passes it as
        # a keyword argument).
        return init_formfield_for_dbfield(Admin_BaseIconTabularModel, self, db_field, **kwargs)

    def image_thumb(self, obj):
        # Delegates to the module-level helper that probes obj for an image.
        return get_image_thumb(self, obj)

    image_thumb.short_description = u'Миниатюра'
    image_thumb.allow_tags = True
|
||||
|
||||
|
||||
class Admin_BaseIconModel(admin.ModelAdmin):
    """Base ModelAdmin: widened text widgets, inline preview widget for
    ImageFields, plus thumbnail and description list columns."""

    def description_exists(self, obj):
        # List-display column: first 30 chars of the description, or '-'.
        if obj.description:
            s = obj.description[:30]
        else:
            s = '-'

        return s

    description_exists.short_description = u'Описание'
    description_exists.allow_tags = True

    def formfield_for_dbfield (self, db_field, request, **kwargs):
        return init_formfield_for_dbfield(Admin_BaseIconModel, self, db_field, request, **kwargs)

    # Render every ImageField with the preview widget defined above.
    formfield_overrides = {
        models.ImageField: {'widget': AdminImageWidget},
    }

    def image_thumb(self, obj):
        # Delegates to the module-level helper that probes obj for an image.
        return get_image_thumb(self, obj)

    image_thumb.short_description = u'Миниатюра'
    image_thumb.allow_tags = True
|
||||
|
||||
|
||||
# from modeltranslation.admin import TranslationAdmin
|
||||
# # class CustomTranslationAdmin(TranslationAdmin):
|
||||
# # # def formfield_for_dbfield(self, db_field, **kwargs):
|
||||
# # # field = super().formfield_for_dbfield(db_field, **kwargs)
|
||||
# # # self.patch_translation_field(db_field, field, **kwargs)
|
||||
# # # return field
|
||||
#
|
||||
#
|
||||
# class AdminTranslation_BaseIconModel(Admin_BaseIconModel, TranslationAdmin):
|
||||
#
|
||||
# # def formfield_for_dbfield(self, db_field, **kwargs):
|
||||
# # field = super(AdminTranslation_BaseIconModel, self).formfield_for_dbfield(db_field, **kwargs)
|
||||
# # self.patch_translation_field(db_field, field, **kwargs)
|
||||
# # return field
|
||||
#
|
||||
# class Media:
|
||||
#
|
||||
# js = (
|
||||
# 'modeltranslation/js/force_jquery.js',
|
||||
# 'http://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js',
|
||||
# 'http://ajax.googleapis.com/ajax/libs/jqueryui/1.10.2/jquery-ui.min.js',
|
||||
# 'modeltranslation/js/tabbed_translation_fields.js',
|
||||
# )
|
||||
# css = {
|
||||
# 'screen': ('modeltranslation/css/tabbed_translation_fields.css',),
|
||||
# }
|
||||
0
BaseModels/api/__init__.py
Normal file
0
BaseModels/api/__init__.py
Normal file
43
BaseModels/api/api_export_xls.py
Normal file
43
BaseModels/api/api_export_xls.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from openpyxl import Workbook
|
||||
from django.http import HttpResponse
|
||||
from openpyxl.writer.excel import save_virtual_workbook
|
||||
|
||||
def xls_export(data, filename):
    """Build an .xlsx download HttpResponse from a list of row dicts.

    Args:
        data: iterable of dict rows to export.
        filename: name offered to the browser in Content-Disposition.

    Returns:
        django.http.HttpResponse with the workbook bytes.
    """
    print('xls_export')

    # The ~30 lines of commented-out hand-rolled openpyxl workbook assembly
    # that used to live here were removed: that logic now lives in
    # office_documents_utils, and this function only wraps the result in a
    # download response.
    from ..office_documents_utils import get_xls_file_by_data_list
    xls_file = get_xls_file_by_data_list(data)

    response = HttpResponse(xls_file, content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename="{0}"'.format(filename)

    return response
|
||||
23
BaseModels/api/api_inter.py
Normal file
23
BaseModels/api/api_inter.py
Normal file
@@ -0,0 +1,23 @@
|
||||
|
||||
|
||||
def check_and_get_specific_output_format(obj, data=None, filename=None):
    """When the request carries ?output_format=xlsx, return an xlsx download
    response; otherwise return None so the caller proceeds normally.

    Args:
        obj: a DRF view instance (provides request / get_serializer / get_queryset).
        data: pre-serialized rows; when falsy they are produced from the
            view's queryset.
        filename: download file name passed through to xls_export.
    """
    params = obj.request.query_params
    wants_xlsx = params and 'output_format' in params and params['output_format'] == 'xlsx'
    if not wants_xlsx:
        return None

    if not data:
        serializer = obj.get_serializer(obj.get_queryset(), many=True)
        data = serializer.data

    from .api_export_xls import xls_export
    return xls_export(data, filename)
|
||||
|
||||
# URL/query-delimiter characters that would break interlinks, each mapped to
# a space.  Built once at import time.
_INTERLINK_BAD_CHARS = str.maketrans('/?;,+:', '      ')


def fix_txt_for_use_in_interlinks(txt):
    """Return txt with '/', '?', ';', ',', '+' and ':' replaced by spaces.

    Uses str.translate: one C-level pass instead of six chained .replace()
    calls, with identical output.
    """
    return txt.translate(_INTERLINK_BAD_CHARS)
|
||||
19
BaseModels/api/api_middlewares.py
Normal file
19
BaseModels/api/api_middlewares.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# from rest_framework import viewsets
|
||||
#
|
||||
# class APILogMiddleware(viewsets.ModelViewSet):
|
||||
# # def __init__(self, get_response):
|
||||
# # self.get_response = get_response
|
||||
# # One-time configuration and initialization.
|
||||
#
|
||||
# def __call__(self, request):
|
||||
# # Code to be executed for each request before
|
||||
# # the view (and later middleware) are called.
|
||||
#
|
||||
# response = self.get_response(request)
|
||||
#
|
||||
# self
|
||||
#
|
||||
# # Code to be executed for each request/response after
|
||||
# # the view is called.
|
||||
#
|
||||
# return response
|
||||
22
BaseModels/api/base_api_parsers.py
Normal file
22
BaseModels/api/base_api_parsers.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import codecs
|
||||
|
||||
from django.conf import settings
|
||||
from rest_framework.exceptions import ParseError
|
||||
from rest_framework.parsers import BaseParser
|
||||
|
||||
class PlainTextParser(BaseParser):
    """DRF parser for text/plain request bodies: returns the body as a str."""
    media_type = "text/plain"

    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as Plain Text and returns the resulting data.
        """
        context = parser_context or {}
        charset = context.get('encoding', settings.DEFAULT_CHARSET)

        try:
            reader = codecs.getreader(charset)(stream)
            return reader.read()
        except ValueError as exc:
            # Bad encoding / undecodable payload -> 400 via DRF's ParseError.
            raise ParseError('Plain text parse error - %s' % str(exc))
|
||||
36
BaseModels/api/base_api_permissions.py
Normal file
36
BaseModels/api/base_api_permissions.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from rest_framework.permissions import BasePermission
|
||||
|
||||
class StaffOnly_perm(BasePermission):
    """
    Allows access only to staff users.
    """

    def has_permission(self, request, view):
        # A user object must be present AND flagged as staff.
        user = request.user
        return user and user.is_staff
|
||||
|
||||
|
||||
class api_1C_perm(BasePermission):
    """
    Allows access only to 1C users.

    The dedicated 1C exchange account (user id 8751) is restricted to
    'warehouse_import' requests only; any other user needs the
    AuthApp.1c_api permission.
    """

    # def has_object_permission(self, request, view, obj):
    def has_permission(self, request, view):
        # NOTE(review): 8751 is the hard-coded id of the 1C service
        # account — consider moving it to settings.
        if request.user.id == 8751:
            # BUG FIX: was try/bare-except returning False — only the
            # absence of req_type should mean "denied", and a bare except
            # hides real errors.  getattr with a default expresses exactly
            # that: missing attribute -> None -> False.
            return getattr(request, 'req_type', None) == 'warehouse_import'

        return request.user.has_perm('AuthApp.1c_api')
|
||||
|
||||
|
||||
class full_api_perm(BasePermission):
    """
    Allows access only to users holding the AuthApp.full_api permission
    (full API access).
    """
    def has_permission(self, request, view):
        return request.user.has_perm('AuthApp.full_api')
|
||||
|
||||
44
BaseModels/api/base_api_serializers.py
Normal file
44
BaseModels/api/base_api_serializers.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from rest_framework import serializers
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from BaseModels.mailSender import techSendMail
|
||||
|
||||
|
||||
class Import_Element_Srializer(serializers.Serializer):
    """One element of an import batch: an arbitrary JSON payload.
    (The 'Srializer' typo is part of the public name and kept for
    compatibility.)"""
    element = serializers.JSONField()

    class Meta:
        fields = (
            'element',
        )
|
||||
|
||||
|
||||
class Import_Pocket_Srializer(serializers.Serializer):
    """Envelope for an import batch: a timestamp, the warehouse identifier
    and the list of elements.  (The 'Srializer' typo is part of the public
    name and kept for compatibility.)"""
    # Integer timestamp of the batch — presumably Unix seconds from the
    # exporting side; confirm against the producer.
    timestamp = serializers.IntegerField()
    warehouse = serializers.CharField()
    data_list = Import_Element_Srializer(many=True)

    class Meta:
        fields = (
            'timestamp', 'warehouse', 'data_list'
        )
|
||||
|
||||
|
||||
|
||||
class Generic_base_Serializer(serializers.ModelSerializer):
    """ModelSerializer base for models with a generic foreign key: accepts an
    optional 'linked_object_type' (a ContentType model name) and resolves it
    to validated_data['content_type'] before object creation."""
    linked_object_type = serializers.CharField(required=False)

    def create(self, validated_data):
        if 'linked_object_type' in validated_data:
            try:
                # Resolve the model name to its ContentType row and drop the
                # raw string so it never reaches Model.objects.create().
                validated_data['content_type'] = ContentType.objects.get(model=validated_data['linked_object_type'])
                del validated_data['linked_object_type']
            except Exception as e:
                # Best-effort alerting; creation still proceeds with
                # linked_object_type unresolved (and will likely fail there).
                msg = 'Ошибка создания generic объекта<br>{0}({1})<br>{2}'.format(
                    str(e),
                    str(e.args),
                    str(validated_data)
                )
                print(msg)
                title = 'ОШИБКА tE Generic_base_Serializer create'
                techSendMail(msg, title)

        return super(Generic_base_Serializer, self).create(validated_data)
||||
356
BaseModels/api/base_api_views.py
Normal file
356
BaseModels/api/base_api_views.py
Normal file
@@ -0,0 +1,356 @@
|
||||
# coding=utf-8
|
||||
from rest_framework import generics
|
||||
from rest_framework.authentication import BasicAuthentication, SessionAuthentication
|
||||
from rest_framework.permissions import IsAuthenticated, DjangoObjectPermissions
|
||||
from rest_framework.views import APIView
|
||||
from rest_framework import viewsets
|
||||
from rest_framework.renderers import JSONRenderer
|
||||
from rest_framework.permissions import AllowAny
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.schemas import SchemaGenerator
|
||||
from rest_framework_swagger import renderers
|
||||
from BaseModels.api.base_api_permissions import *
|
||||
from datetime import datetime
|
||||
from GeneralApp.temp_data_funcs import add_element_in_tmp_data_list, check_exists_element_in_tmp_data_list, add_element_list_to_tmp_data
|
||||
from rest_framework.utils.serializer_helpers import ReturnList
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework import status
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
from BaseModels.mailSender import techSendMail
|
||||
# from BaseModels.api.api_middlewares import APILogMiddleware
|
||||
|
||||
|
||||
class SwaggerSchemaView(APIView):
    """Serves the auto-generated API schema through the Swagger UI.
    Deliberately open to everyone (AllowAny)."""
    permission_classes = [AllowAny]
    renderer_classes = [
        renderers.OpenAPIRenderer,
        renderers.SwaggerUIRenderer
    ]

    def get(self, request):
        # Build the schema for the current request (host-aware URLs).
        generator = SchemaGenerator()
        schema = generator.get_schema(request=request)

        return Response(schema)
|
||||
|
||||
|
||||
|
||||
|
||||
# BUG FIX: the original did `JSONCustomRenderer = JSONRenderer` and then set
# `.charset` on it, which mutated the shared JSONRenderer class for the whole
# process.  A subclass keeps the customisation local while remaining a
# drop-in replacement for every existing JSONCustomRenderer user.
class JSONCustomRenderer(JSONRenderer):
    charset = 'utf-8'
|
||||
|
||||
|
||||
class APIBasePublicClass(APIView):
    """Base for public (unauthenticated) API endpoints: AllowAny permission,
    pagination disabled."""
    # authentication_classes = (SessionAuthentication, BasicAuthentication)
    permission_classes = (AllowAny,)
    # renderer_classes = [JSONCustomRenderer]
    pagination_class = None

    # def finalize_response(self, request, response, *args, **kwargs):
    #
    #     res = super(APIBasePublicClass, self).finalize_response(request, response, *args, **kwargs)
    #
    #     from CompaniesApp.models import Region
    #     regions = Region.objects.filter().values_list(
    #         'id', 'domain'
    #     ).order_by('id')
    #     res.data.update({'regions': tuple(regions)})
    #
    #     return res

    # def get(self, request, *args, **kwargs):
    #
    #     if not 'region_id' in request.headers:
    #         request.headers['region_id'] = '1'
    #
    #     return super(APIBasePublicClass, self).get(request, *args, **kwargs)
|
||||
|
||||
class APIListBaseClass(generics.ListAPIView):
    """List endpoint base: authenticated users only, pagination disabled."""
    # authentication_classes = (SessionAuthentication, BasicAuthentication, )#
    permission_classes = (IsAuthenticated,)
    pagination_class = None
|
||||
|
||||
class APIBaseClass(generics.RetrieveAPIView):
    """Single-object retrieve endpoint base: authenticated users only,
    pagination disabled."""
    # authentication_classes = (SessionAuthentication, BasicAuthentication, )#
    permission_classes = (IsAuthenticated, )
    # renderer_classes = [JSONCustomRenderer]
    pagination_class = None
|
||||
|
||||
|
||||
class APIBaseSimplaClass(generics.GenericAPIView):
    """Plain GenericAPIView base: authenticated users only, pagination
    disabled.  (The 'Simpla' typo is part of the public name and kept for
    compatibility.)"""
    # authentication_classes = (SessionAuthentication, BasicAuthentication)
    permission_classes = (IsAuthenticated,)
    # renderer_classes = [JSONCustomRenderer]
    pagination_class = None
|
||||
|
||||
# ----------------------------
|
||||
|
||||
class APIViewSet_ModelReadOnlyClass(viewsets.ReadOnlyModelViewSet):
    """Read-only model viewset base: authenticated users only, pagination
    disabled."""
    # Removed a stray `pass` statement that preceded the class attributes —
    # dead code left over from an earlier empty body.
    # authentication_classes = (SessionAuthentication, BasicAuthentication, )#
    permission_classes = (IsAuthenticated, )
    # renderer_classes = [JSONCustomRenderer]
    pagination_class = None
|
||||
|
||||
|
||||
|
||||
# NOTE(review): despite its name, this list holds the WRITE actions for which
# log_save_cur_state_obj DOES record a state snapshot (it returns early for
# actions NOT in the list).  The name is kept because other modules may
# import it.
exclude_actions_for_logging = []

# All creation-style viewset actions.
# BUG FIX: 'create_short' was listed twice in the original.
create_kwargs = [
    'create', 'create_short', 'create_item',
    'copy_item', 'create_reminder'
]

exclude_actions_for_logging.extend(create_kwargs)
# Mutation/deletion actions are logged as well.
exclude_actions_for_logging.extend([
    'update', 'partial_update', 'destroy', 'update_items', 'update_item'
])
|
||||
|
||||
def log_save_cur_state_obj(query_data, response=None, init=False):
    """Snapshot the object(s) touched by a viewset action into the tmp-data log.

    query_data: the viewset instance (provides basename, action, request,
    kwargs, queryset and serializer selection).
    response: the DRF response when called after the action ran.
    init: True marks a pre-action snapshot of the current state.
    Returns 'OK', or None when the action is not logged / no object found.
    """

    # Only mutating actions are logged; 'alert' objects are excluded entirely.
    if query_data.basename == 'alert' or not query_data.action in exclude_actions_for_logging:
        return None

    # Skip failed requests.
    if response and response.status_code > 299:
        return None

    data_Dict = {}
    data_target = 'log_{0}'.format(str(query_data.basename))
    obj_id = None

    try:

        # Determine which object ids the action touched, trying the richest
        # source first: bulk request payload, response body, serializer
        # instance, request payload, URL pk, and finally the whole queryset.
        if type(query_data.request.data) == list and query_data.request.data and len(query_data.request.data) > 0 and \
                'id' in query_data.request.data[0]:
            objs_list_ids = [obj['id'] for obj in query_data.request.data]
        elif response and response.data and type(response.data) == dict and 'id' in response.data:
            objs_list_ids = [response.data['id']]
        elif response and response.data and getattr(response.data.serializer, 'instance', None):
            objs_list_ids = [response.data.serializer.instance.id]
        elif response and response.data and 'id' in response.data and response.data['id']:
            objs_list_ids = [response.data['id']]
        elif query_data.request.data and 'id' in query_data.request.data:
            objs_list_ids = [query_data.request.data['id']]
        elif 'pk' in query_data.kwargs:
            objs_list_ids = [query_data.kwargs['pk']]
        elif query_data.queryset:
            # NOTE(review): values_list('id') yields 1-tuples, not flat ids —
            # confirm the id__in filter below behaves as intended on this path.
            objs_list_ids = query_data.queryset.values_list('id')
        else:
            return None

        objs_list = query_data.queryset.filter(id__in=objs_list_ids)

        # Serialize with the read ('retrieve') serializer: temporarily switch
        # the action so get_serializer_class() picks the retrieve variant.
        cur_action = query_data.action
        query_data.action = 'retrieve'
        serializer = query_data.get_serializer_class()
        query_data.action = cur_action
        obj_data_list = serializer(objs_list, many=True)

        elements_list_for_add_to_tmp_data = []
        for obj_data in obj_data_list.data:
            obj_id = obj_data['id']

            # Coerce non-JSON-serializable field values to strings.
            for item_data in obj_data.keys():
                if type(obj_data[item_data]) not in (str, int, float, dict, list, bool):
                    obj_data[item_data] = str(obj_data[item_data])

            data_Dict = {
                'id': obj_id,
                'data': obj_data,
                'DT': str(datetime.now()),
                'user': str(query_data.request.user),
                'oper_type': query_data.action,
                'init': init
            }

            elements_list_for_add_to_tmp_data.append(data_Dict)

        add_element_list_to_tmp_data('log', data_target, init, elements_list_for_add_to_tmp_data)

    except Exception as e:
        # Best-effort logging: report the failure by mail, never raise.
        response_data = ''
        if response and response.data:
            response_data = str(response.data)

        msg = 'log_save_cur_state_obj fail save to log w data = {0}<br>{1}<br>{2}<br>response_data={3}'.format(
            str(e),
            'log - ' + str(data_target) + ' - ' + str(obj_id),
            str(data_Dict),
            response_data
        )
        techSendMail(msg)

    return 'OK'
|
||||
|
||||
|
||||
|
||||
class APIViewSet_ModelClass(viewsets.ModelViewSet):
    """Model viewset base that snapshots object state into the tmp-data log
    around every mutating action (see exclude_actions_for_logging)."""

    def initial(self, request, *args, **kwargs):
        """Before the action runs: log the current (pre-change) state,
        except for create-style actions where no prior state exists."""
        res = super(APIViewSet_ModelClass, self).initial(request, *args, **kwargs)
        if self.basename == 'alert' or not self.action in exclude_actions_for_logging:
            return res

        if not self.action in create_kwargs:
            log_save_cur_state_obj(self, init=True)

        return res

    def finalize_response(self, request, response, *args, **kwargs):
        """After the action: log the resulting state for mutating actions."""
        res = super(APIViewSet_ModelClass, self).finalize_response(request, response, *args, **kwargs)
        if self.basename == 'alert' or not self.action in exclude_actions_for_logging:
            return res

        log_save_cur_state_obj(self, response=response)

        return res

    def create(self, request, *args, **kwargs):
        # Plain pass-through; post-create logging happens in finalize_response.
        obj = super(APIViewSet_ModelClass, self).create(request, *args, **kwargs)

        return obj

    def partial_update(self, request, *args, **kwargs):
        # Stamp the modification time into the incoming payload.
        # NOTE(review): assumes request.data is mutable (an immutable QueryDict
        # would raise) and that the model has a modifiedDT field — confirm.
        if request.data:
            request.data['modifiedDT'] = datetime.now()
        obj = super(APIViewSet_ModelClass, self).partial_update(request, *args, **kwargs)

        return obj
|
||||
|
||||
|
||||
class APIViewSet_ModelClass_w_Expenses(APIViewSet_ModelClass):
    """Model viewset with extra read-only endpoints for the related
    expenses_rates / expenses_data collections of the addressed object."""

    @action(methods=['GET'], detail=True)
    def expenses_rates(self, request, *args, **kwargs):
        """Return the serialized expenses_rates of the object addressed by pk."""
        from ExpensesApp.api.v1.expenses_rate.expenses_rate_api_serializers import ExpensesRate_get_Serializer
        model = self.serializer_class.Meta.model

        try:
            obj = model.objects.get(id=kwargs['pk'])

        except model.DoesNotExist:
            return Response({'error': u'ошибка получения expenses_rates'},
                            status=status.HTTP_400_BAD_REQUEST)

        expenses_rates = obj.expenses_rates.all()
        serializer = ExpensesRate_get_Serializer(expenses_rates, many=True)

        return Response(serializer.data)

    @action(methods=['GET'], detail=True)
    def expenses_data(self, request, *args, **kwargs):
        """Return the serialized expenses_data of the object addressed by pk."""
        from ExpensesApp.api.v1.expenses_data.expenses_data_api_serializers import ExpensesData_get_Serializer
        model = self.serializer_class.Meta.model

        try:
            obj = model.objects.get(id=kwargs['pk'])

        except model.DoesNotExist:
            # Bug fix: the error message previously said 'expenses_rates'
            # (copy-paste from the endpoint above).
            return Response({'error': u'ошибка получения expenses_data'},
                            status=status.HTTP_400_BAD_REQUEST)

        expenses_data = obj.expenses_data.all()
        serializer = ExpensesData_get_Serializer(expenses_data, many=True)

        return Response(serializer.data)
|
||||
|
||||
|
||||
class APIViewSet_BaseClass(viewsets.ViewSet):
    """Plain viewset base: requires an authenticated user, pagination disabled."""
    # Fix: removed a stray `pass` that preceded these attributes and made the
    # class body look empty at first glance (the attributes were still executed).
    # authentication_classes = (SessionAuthentication, BasicAuthentication,) #
    permission_classes = (IsAuthenticated, )
    # renderer_classes = [JSONCustomRenderer]
    pagination_class = None
|
||||
|
||||
|
||||
# class APIBaseClass(generics.RetrieveAPIView):
|
||||
# authentication_classes = (SessionAuthentication, BasicAuthentication, )#
|
||||
# permission_classes = (IsAuthenticated,)
|
||||
# # renderer_classes = [JSONCustomRenderer]
|
||||
# pagination_class = None
|
||||
#
|
||||
#
|
||||
# class APIBaseSimplaClass(APIView):
|
||||
# authentication_classes = (SessionAuthentication, BasicAuthentication)
|
||||
# permission_classes = (IsAuthenticated,)
|
||||
# # renderer_classes = [JSONCustomRenderer]
|
||||
# pagination_class = None
|
||||
134
BaseModels/base_models.py
Normal file
134
BaseModels/base_models.py
Normal file
@@ -0,0 +1,134 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
__author__ = 'SDE'
|
||||
|
||||
from django.db import models
|
||||
from datetime import datetime
|
||||
# from ckeditor.fields import RichTextField
|
||||
# from BaseModels.pil_graphic_utils import *
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.db.models.signals import post_save, pre_save
|
||||
from django.utils.text import slugify
|
||||
from django.contrib.postgres.fields import JSONField
|
||||
# from ckeditor.fields import RichTextField
|
||||
from ckeditor_uploader.fields import RichTextUploadingField
|
||||
from django.contrib.contenttypes.fields import GenericRelation
|
||||
|
||||
|
||||
# add_introspection_rules([], ["^tinymce\.models\.HTMLField"])
|
||||
|
||||
class BaseModel(models.Model):
    """Abstract base model: name/order/timestamps/enable flag plus a free-form
    JSON side-channel (json_data) with small node accessors."""
    name = models.TextField(verbose_name=_('Название'),
                            help_text=_('Название'), null=True, blank=True)
    name_plural = models.TextField(verbose_name=_('Название (множественное число)'),
                                   null=True, blank=True)
    order = models.IntegerField(verbose_name=_('Очередность отображения'), null=True, blank=True)
    createDT = models.DateTimeField(auto_now_add=True, verbose_name=_('Дата и время создания'))
    modifiedDT = models.DateTimeField(verbose_name=_('Дата и время последнего изменения'), null=True, blank=True)
    enable = models.BooleanField(verbose_name=_('Включено'), default=True, db_index=True)

    json_data = models.JSONField(verbose_name=_('Дополнительные данные'), default=dict, blank=True)

    def __str__(self):
        # Prefer the human-readable name; fall back to the primary key.
        if self.name:
            return self.name
        else:
            return str(self.id)

    def pop_node_by_name(self, node_name):
        """Remove json_data[node_name], persist the change, and return the
        removed value; None when json_data is empty or the key is absent."""
        if not self.json_data or not node_name in self.json_data:
            return None

        res = self.json_data[node_name]
        del self.json_data[node_name]
        # NOTE(review): update_fields omits modifiedDT, so the timestamp set
        # inside save() is not written on this path — confirm intended.
        self.save(update_fields=['json_data'])

        return res

    def get_node_by_name(self, node_name):
        """Return json_data[node_name], or None when absent/empty."""
        if not self.json_data or not node_name in self.json_data:
            return None

        return self.json_data[node_name]

    def add_node_to_json_data(self, node_data, save=False):
        """Merge node_data into json_data (dict.update) or append it (list),
        optionally persisting; returns the resulting json_data."""
        if not self.json_data:
            self.json_data = {}
        if type(self.json_data) == dict:
            self.json_data.update(node_data)
        elif type(self.json_data) == list:
            self.json_data.append(node_data)

        if save:
            self.save(update_fields=['json_data'])

        return self.json_data

    def save(self, not_change_modifiedDT=False, *args, **kwargs):
        """Stamp modifiedDT on every save unless explicitly suppressed."""
        if not not_change_modifiedDT:
            self.modifiedDT = datetime.now()
        super().save(*args, **kwargs)

    class Meta:
        abstract = True
|
||||
|
||||
|
||||
def preSave_BaseModel(sender, instance, **kwargs):
    """pre_save receiver: refresh modifiedDT on instances carrying a user_profile.

    Fix: BaseModel itself declares no user_profile field, so the original
    plain attribute access could raise AttributeError for senders without it;
    getattr makes the check safe.
    """
    if instance and getattr(instance, 'user_profile', None):
        instance.modifiedDT = datetime.now()
|
||||
|
||||
|
||||
# NOTE(review): BaseModel is abstract — signals connected with an abstract
# sender are not dispatched for its concrete subclasses; confirm this hook
# ever fires in practice.
pre_save.connect(preSave_BaseModel, sender=BaseModel, dispatch_uid='pre_save_connect')
|
||||
|
||||
|
||||
class BaseModelViewPage(BaseModel):
    """Abstract page-backed model: unique URL, rich-text content, imagery,
    SEO metadata and an FAQ generic relation on top of BaseModel."""
    url = models.TextField(verbose_name=_('URL привязанной страницы'), unique=True,
                           help_text=_(
                               'можно изменить адрес страницы (!!! ВНИМАНИЕ !!! поисковые системы потеряют страницу и найдут лишь спустя неделю...месяц)'))
    title = models.TextField(verbose_name=_('Заголовок'), null=True, blank=True)
    description = RichTextUploadingField(verbose_name=_('Краткое описание'), null=True, blank=True,  # max_length=240,
                                         help_text=_('краткое описание страницы (до 240 символов)'))
    text = RichTextUploadingField(verbose_name=_('Полное описание'), null=True, blank=True, )
    picture = models.ImageField(upload_to='uploads/', verbose_name=_('Картинка'), null=True, blank=True,
                                help_text=u'')
    visible = models.BooleanField(verbose_name=_('Отображать'), default=True)
    background_image_left = models.ImageField(verbose_name=_('Левая подложка'), blank=True, null=True)
    background_image_right = models.ImageField(verbose_name=_('Правая подложка'), blank=True, null=True)

    # SEO metadata block
    seo_title = models.CharField(max_length=250, verbose_name=_('Title (80 знаков)'), null=True, blank=True)
    seo_description = models.CharField(max_length=250, verbose_name=_('Description (150 знаков)'), null=True,
                                       blank=True)
    seo_keywords = models.CharField(max_length=250, verbose_name=_('Keywords (200 знаков)'), null=True, blank=True)
    seo_text = RichTextUploadingField(verbose_name=_(u'Текст SEO статьи'), null=True, blank=True)

    # Optional FAQ block attached via a generic relation
    FAQ_title = models.CharField(max_length=250, verbose_name=_(u'FAQ Заголовок'), null=True, blank=True)
    FAQ_items = GenericRelation('GeneralApp.FAQitem', related_query_name='grel_%(class)s_for_faq_item')

    class Meta:
        abstract = True

    def get_description_exists(self):
        """True when a short description is present."""
        if self.description:
            return True
        return False

    def get_text_exists(self):
        """True when full text content is present."""
        if self.text:
            return True
        return False
|
||||
|
||||
# @receiver(pre_save, sender=User)
|
||||
def preSaveBaseModelViewPage(sender, instance, **kwargs):
    """pre_save receiver: default a missing page url to a slug of its name.

    Bug fix: the receiver must inspect (and mutate) the *instance* being
    saved; `sender` is the model class, whose `url` attribute is a field
    descriptor and therefore never falsy — the original condition never fired
    and, worse, would have assigned onto the class.
    """
    if not instance.url:
        instance.url = slugify(instance.name)
|
||||
|
||||
|
||||
# NOTE(review): BaseModelViewPage is abstract as well — confirm this receiver
# is dispatched for its concrete subclasses (signals key on the exact sender).
pre_save.connect(preSaveBaseModelViewPage, sender=BaseModelViewPage, dispatch_uid='pre_save_connect')
|
||||
|
||||
|
||||
150
BaseModels/colors/generate_colors.py
Normal file
150
BaseModels/colors/generate_colors.py
Normal file
@@ -0,0 +1,150 @@
|
||||
from colorsys import hls_to_rgb, rgb_to_hls, rgb_to_hsv, hsv_to_rgb
|
||||
from random import uniform, randint
|
||||
|
||||
# Default HLS parameters used by Huetify: mid lightness, full saturation,
# and a total random hue spread of 0.2 (±0.1 around the base hue).
DEFAULT_LIGHTNESS = 0.5
DEFAULT_SATURATION = 1
DEFAULT_VARIANCE = 0.2
|
||||
|
||||
|
||||
def get_next_HSV_color(cur_color, offset_hue=0, offset_value=0, offset_saturation=0):
    """Shift an 'rrggbb' hex color in HSV space and return the new hex string.

    cur_color: six hex digits, no '#'.
    offset_hue is added (hue wraps), offset_value is added (value scale 0-255),
    offset_saturation is subtracted (scale 0-1).
    """
    red = int(cur_color[0:2], base=16)
    green = int(cur_color[2:4], base=16)
    blue = int(cur_color[4:6], base=16)

    # colorsys keeps value in the input scale (0-255 here); hue/saturation are 0-1.
    hue, saturation, value = rgb_to_hsv(red, green, blue)

    # Hue wraps inside hsv_to_rgb (h*6 is reduced modulo 6), so no clamp needed.
    new_hue = hue + offset_hue
    # Fix: clamp value and saturation — out-of-range results previously
    # produced malformed hex output (3-digit or sign-prefixed components).
    new_value = min(max(value + offset_value, 0), 255)
    new_saturation = min(max(saturation - offset_saturation, 0), 1)

    red, green, blue = hsv_to_rgb(new_hue, new_saturation, new_value)

    res = f"{int(red):02x}{int(green):02x}{int(blue):02x}"

    return res
|
||||
|
||||
|
||||
def get_next_color(cur_color, offset_hue=0, offset_lightness=0, offset_saturation=0):
|
||||
red = int(cur_color[0:2], base=16)
|
||||
green = int(cur_color[2:4], base=16)
|
||||
blue = int(cur_color[4:6], base=16)
|
||||
|
||||
hue, lightness, saturation = rgb_to_hls(red, green, blue)
|
||||
lightness = lightness / 255
|
||||
if saturation < 0.1:
|
||||
saturation = 1
|
||||
|
||||
new_hue = hue + offset_hue
|
||||
new_lightness = lightness + offset_lightness
|
||||
new_saturation = saturation + offset_saturation
|
||||
|
||||
if new_hue > 1: new_hue = offset_hue
|
||||
if new_hue < 0: new_hue = 1
|
||||
if new_lightness > 1: new_lightness = offset_lightness
|
||||
if new_lightness < 0: new_lightness = 1
|
||||
if new_saturation > 1: new_saturation = offset_saturation
|
||||
if new_saturation < 0: new_saturation = 1
|
||||
|
||||
red, green, blue = map(
|
||||
lambda v: int(v * 255),
|
||||
hls_to_rgb(
|
||||
new_hue,
|
||||
new_lightness,
|
||||
new_saturation,
|
||||
),
|
||||
)
|
||||
|
||||
res = f"{red:02x}{green:02x}{blue:02x}"
|
||||
|
||||
return res
|
||||
|
||||
|
||||
class Huetify(object):
    """Generate 'rrggbb' hex colors around preset hues, with random hue
    variance, at a fixed HLS lightness/saturation."""

    lightness: float       # HLS lightness applied to every generated color
    saturation: float      # HLS saturation applied to every generated color
    variance: float        # total random hue spread around a base hue
    half_variance: float   # precomputed variance / 2

    def __init__(
            self,
            lightness=DEFAULT_LIGHTNESS,
            saturation=DEFAULT_SATURATION,
            variance=DEFAULT_VARIANCE,
    ) -> None:
        self.lightness = lightness
        self.saturation = saturation
        self.variance = variance
        self.half_variance = variance / 2.0

    def huetify_to_rgb_hex(self, hue) -> str:
        """Return a hex color with hue drawn uniformly within ±variance/2 of *hue*."""
        hue_variant = uniform(
            hue - self.half_variance,
            hue + self.half_variance,
        )
        red, green, blue = map(
            lambda v: int(v * 255),
            hls_to_rgb(
                hue_variant,
                self.lightness,
                self.saturation,
            ),
        )
        return f"{red:02x}{green:02x}{blue:02x}"

    def huetify_next_variant_to_rgb_hex(self, cur_variant):
        """Advance *cur_variant* by half the variance and return an (r, g, b) tuple."""
        hue_variant = cur_variant + self.half_variance
        red, green, blue = map(
            lambda v: int(v * 255),
            hls_to_rgb(
                hue_variant,
                self.lightness,
                self.saturation,
            ),
        )
        return red, green, blue

    @property
    def reddish(self):
        return self.huetify_to_rgb_hex(0)

    @property
    def greenish(self):
        return self.huetify_to_rgb_hex(0.333)

    @property
    def blueish(self):
        return self.huetify_to_rgb_hex(0.666)

    def blue_colors(self, cur_variant=None):
        """Step through blue hue variants; starts just below blue when no
        current variant is given."""
        # Bug fix: `if not cur_variant` also rejected the legitimate hue 0;
        # only a missing argument should trigger the default.
        if cur_variant is None:
            cur_variant = 0.666 - self.half_variance
        return self.huetify_next_variant_to_rgb_hex(cur_variant=cur_variant)

    @property
    def yellowish(self):
        return self.huetify_to_rgb_hex(0.166)

    @property
    def random_color(self):
        """Return one of the four preset hue families at random."""
        ch = randint(1, 4)
        if ch == 1:
            return self.reddish
        elif ch == 2:
            return self.greenish
        elif ch == 3:
            # Bug fix: this branch returned greenish twice; blueish was unreachable.
            return self.blueish
        else:
            return self.yellowish
|
||||
0
BaseModels/currency_exchange/__init__.py
Normal file
0
BaseModels/currency_exchange/__init__.py
Normal file
108
BaseModels/currency_exchange/alfabank_api/alfabank_api_funcs.py
Normal file
108
BaseModels/currency_exchange/alfabank_api/alfabank_api_funcs.py
Normal file
@@ -0,0 +1,108 @@
|
||||
import requests
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from BaseModels.mailSender import techSendMail
|
||||
from GeneralApp.temp_data_funcs import *
|
||||
|
||||
|
||||
def get_alfabank_nb_rate_by_currency_code(code, date=None):
    """Fetch the national-bank rate for *code* from the Alfabank partner API.

    code: ISO currency abbreviation; 'USD'/'EUR'/'RUB' are mapped to their
    numeric codes, anything else queries the full list and is matched by 'iso'.
    date: when truthy, a date query parameter is appended (see NOTE below).
    On success the per-unit rate is cached via create_or_update_tmp_data.
    Returns the rate or None; failures are mailed, never raised.
    """

    rate = None
    res = None
    req_str = None

    try:

        msg = f'get_alfabank_nb_rate_by_currency_code'
        print(msg)

        # Map the known ISO abbreviations to numeric currency codes.
        int_code = None
        if code == 'USD':
            int_code = 840
        elif code == 'EUR':
            int_code = 978
        elif code == 'RUB':
            int_code = 643

        code_str = ''
        if int_code:
            code_str = f'?currencyCode={int_code}'

        date_str = ''
        if date:
            # NOTE(review): builds the date from datetime.now() rather than
            # from the `date` argument — confirm this is intended.
            date_str = f'date={datetime.now().strftime("%d.%m.%Y")}'
            if int_code:
                date_str = f'&{date_str}'
            else:
                date_str = f'?{date_str}'

        req_str = f'https://developerhub.alfabank.by:8273/partner/1.0.1/public/nationalRates{code_str}{date_str}'

        try:
            msg = f'GET {req_str}'
            print(msg)
            res = requests.get(req_str)
            msg = f'answer received = {str(res)}'
            print(msg)
        except Exception as e:
            # Network failure: log locally and fall through with res=None.
            msg = f'Exception GET {req_str} = {str(e)} ({str(res)})'
            print(msg)
            res = None

        if res:

            data = json.loads(res.content)

            # Locate the requested currency and normalize the rate per one unit.
            for item in data['rates']:
                if item['iso'].upper() == code.upper():
                    rate = item['rate'] / item['quantity']

                    rate_Dict = {
                        'rate': rate,
                        'DT': datetime.now().strftime('%d.%m.%Y %H:%M')
                    }

                    create_or_update_tmp_data('currency_rate', code, rate_Dict)
                    break

    except Exception as e:
        msg = '<b style="color : red;">!!!!! --- get_alfabank_nb_rate_by_currency_code error={0}</b><br>{1}<br>{2}'.format(
            str(e),
            str(res),
            str(req_str)
        )
        print(msg)
        techSendMail(msg, 'tE get_alfabank_nb_rate_by_currency_code error')

    msg = f'get alfabank nb {code} rate = {str(rate)}'
    print(msg)

    return rate
|
||||
51
BaseModels/currency_exchange/funcs.py
Normal file
51
BaseModels/currency_exchange/funcs.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import requests
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from BaseModels.mailSender import techSendMail
|
||||
from GeneralApp.temp_data_funcs import *
|
||||
|
||||
|
||||
def get_rate_nb_by_currency_code(code, date=None):
    """Return the national-bank rate for *code* (base currency BYN → 1).

    Uses a tmp-data cache: a rate younger than 30 minutes is served from the
    cache; retries against the API are throttled to once per 5 minutes, with
    a stale rate (or None) served in between. Falls back to the Alfabank API.
    Returns the rate or None; failures are mailed, never raised.
    """
    from .nbrb.nbrb_currency_exchange import get_nbrb_rate_by_currency_code
    from .alfabank_api.alfabank_api_funcs import get_alfabank_nb_rate_by_currency_code

    if code == 'BYN':
        return 1

    rate = None
    request_required = True

    try:

        tmp_rec = get_tmp_data('currency_rate', code)
        if tmp_rec and tmp_rec.json_data:
            if 'rate' in tmp_rec.json_data:
                # Less than 30 minutes since the last import: use the cached rate.
                if datetime.strptime(tmp_rec.json_data['DT'], '%d.%m.%Y %H:%M') + timedelta(
                        minutes=30) > datetime.now():
                    rate = tmp_rec.json_data['rate']

            if not rate:
                # Less than 5 minutes since the last attempt: serve the old
                # rate (or suppress the request entirely when none exists).
                if tmp_rec.modifiedDT + timedelta(minutes=5) > datetime.now():
                    if 'rate' in tmp_rec.json_data:
                        rate = tmp_rec.json_data['rate']
                    else:
                        request_required = False

        if request_required:
            if not rate:
                rate = get_alfabank_nb_rate_by_currency_code(code)

            # NOTE(review): when no tmp record exists yet, tmp_rec is None and
            # the next line raises AttributeError (swallowed by the except
            # below), so the throttle timestamp is never recorded — confirm.
            tmp_rec.modifiedDT = datetime.now()
            tmp_rec.save()

    except Exception as e:
        msg = f'<b style="color : red;">!!!!! --- get_rate_nb_by_currency_code error={str(e)}</b>'
        print(msg)
        techSendMail(msg, 'tE get_rate_nb_by_currency_code error')

    return rate
|
||||
0
BaseModels/currency_exchange/nbrb/__init__.py
Normal file
0
BaseModels/currency_exchange/nbrb/__init__.py
Normal file
115
BaseModels/currency_exchange/nbrb/nbrb_currency_exchange.py
Normal file
115
BaseModels/currency_exchange/nbrb/nbrb_currency_exchange.py
Normal file
@@ -0,0 +1,115 @@
|
||||
import requests
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
from BaseModels.mailSender import techSendMail
|
||||
from GeneralApp.temp_data_funcs import *
|
||||
|
||||
|
||||
def get_nbrb_currency_id_by_currency_code(code):
    """Resolve an ISO abbreviation (e.g. 'USD') to the NBRB internal
    Cur_Code via the public currencies endpoint; None when not found."""
    response = requests.get('https://www.nbrb.by/api/exrates/currencies')

    currencies = json.loads(response.content)

    for currency in currencies:
        if 'Cur_Abbreviation' in currency and currency['Cur_Abbreviation'] == code:
            return currency['Cur_Code']

    return None
|
||||
|
||||
|
||||
def get_nbrb_rate_by_currency_code(code, date=None):
    """Fetch the NBRB exchange rate for currency *code* (per one unit, in BYN).

    date: when truthy, today's date is appended to the request (see NOTE).
    On success the rate is cached via create_or_update_tmp_data.
    Returns the rate or None; failures are mailed, never raised.
    """
    rate = None
    res = None
    req_str = None

    try:

        msg = f'get_nbrb_rate_by_currency_code'
        print(msg)

        if not date:
            req_str = 'https://www.nbrb.by/api/exrates/rates/{0}?parammode=2'.format(str(code))
        else:
            # NOTE(review): uses datetime.now() rather than the `date`
            # argument — confirm this is intended.
            date_str = datetime.now().strftime('%Y-%m-%d')
            date_str = date_str.replace('-0', '-')
            req_str = 'https://www.nbrb.by/api/exrates/rates/{0}?parammode=2&ondate={1}'.format(
                str(code),
                date_str
            )
        e = None
        try:
            msg = f'GET {req_str}'
            print(msg)
            res = requests.get(req_str, timeout=3)
            msg = f'answer received = {str(res)}'
            print(msg)
        except Exception as request_error:
            # Bug fix: `except ... as e` unbinds the name when the handler
            # ends, so the error report below crashed with NameError; keep an
            # explicit reference instead.
            e = request_error
            msg = f'Exception GET {req_str} = {str(request_error)} ({str(res)})'
            print(msg)
            res = None

        if not res and res != 200:
            msg = '<b style="color : red;">!!!!! --- get_nbrb_rate_by_currency_code requests GET error={0}</b><br>{1}<br>{2}<br>rate set = {3}'.format(
                str(e),
                str(res),
                str(req_str),
                str(rate)
            )
            print(msg)
            techSendMail(msg, 'tE get_nbrb_rate_by_currency_code error')

        if res is None:
            # Bug fix: previously fell through to res.content with res=None,
            # raising AttributeError and triggering a second error mail.
            return rate

        data = json.loads(res.content)

        if data and 'Cur_OfficialRate' in data and 'Cur_Scale' in data:
            rate = data['Cur_OfficialRate'] / data['Cur_Scale']

            rate_Dict = {
                'rate': rate,
                'DT': datetime.now().strftime('%d.%m.%Y %H:%M')
            }

            create_or_update_tmp_data('currency_rate', code, rate_Dict)

    except Exception as e:
        msg = '<b style="color : red;">!!!!! --- get_nbrb_rate_by_currency_code error={0}</b><br>{1}<br>{2}'.format(
            str(e),
            str(res),
            str(req_str)
        )
        print(msg)
        techSendMail(msg, 'tE get_nbrb_rate_by_currency_code error')

    if rate:
        msg = f'get nbrb nb rate = {rate}'
        print(msg)

    return rate
|
||||
35
BaseModels/decorators.py
Normal file
35
BaseModels/decorators.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from django.http import HttpResponse, JsonResponse
|
||||
import json
|
||||
|
||||
def _make_result(result):
    """Serialize *result* into an application/json HTTP response.

    Fix: was an assigned lambda (PEP 8 E731); a def gives a proper name in
    tracebacks with identical behavior.
    """
    return JsonResponse(result)
|
||||
|
||||
|
||||
def jsonifydata():
    """Decorator factory: JSON-encode the wrapped view's return value into an
    application/json HttpResponse."""
    def decorator(func):
        def wrapper(request, *args, **kwargs):
            result = func(request, *args, **kwargs)
            # Bug fix: the `mimetype` keyword was removed from HttpResponse in
            # Django 1.7 and raised TypeError; `content_type` is the
            # equivalent argument.
            return HttpResponse(json.dumps(result), content_type='application/json')
        return wrapper
    return decorator
|
||||
|
||||
|
||||
def jsonify(validation_form=None):
    """Decorator factory: JSON-encode the wrapped view's return value.

    When *validation_form* is given, request.POST/FILES are validated first;
    on failure the view is skipped and {'result': False, 'errors': ...} is
    returned, on success the cleaned data is attached as request.form_data.
    """
    def decorator(func):
        def wrapper(request, *args, **kwargs):
            if validation_form is not None:
                form = validation_form(data=request.POST, files=request.FILES)
                if not form.is_valid():
                    return _make_result({'result': False, 'errors': form.errors})
                request.form_data = form.cleaned_data
            return _make_result(func(request, *args, **kwargs))
        return wrapper
    return decorator
|
||||
67
BaseModels/error_processing.py
Normal file
67
BaseModels/error_processing.py
Normal file
@@ -0,0 +1,67 @@
|
||||
from tEDataProj.settings import EXCEPTION_IMPORT_LOG_PATH, EXCEPTION_LOG_PATH
|
||||
import codecs
|
||||
from datetime import datetime
|
||||
|
||||
def open_log_file(message, filename=None, import_exc=False):
    """Open the (import-)error log for appending (UTF-8) and write an opening
    banner containing *message*.

    import_exc selects the import-error path and default filename.
    Returns the open file object (pass it to close_log_file when done).
    """
    if not filename:
        filename = u'import_errors.log' if import_exc else u'errors.log'

    log_path = EXCEPTION_IMPORT_LOG_PATH if import_exc else EXCEPTION_LOG_PATH

    log_file = codecs.open(log_path + filename, 'a', "utf-8")

    opening = u'{0} - {1}\n---------------------------\n\n'.format(
        str(datetime.now()),
        message
    )
    log_file.write(opening)

    return log_file
|
||||
|
||||
|
||||
def close_log_file(f, message):
    """Write a closing banner containing *message* to the open log *f*,
    close it, and return True."""
    closing = u'---------------------------\n{0} - {1}\n\n'.format(
        str(datetime.now()),
        message
    )
    f.write(closing)
    f.close()
    return True
|
||||
|
||||
|
||||
def save_log_string(f, exc_data):
    """Append one formatted error record to the open log *f* and return True.

    exc_data must supply 'err_code', 'err_text' and 'err_data'.
    """
    record = u'- {0} - {1} ({2})\n{3}\n'.format(
        str(datetime.now()),
        exc_data['err_code'],
        exc_data['err_text'],
        exc_data['err_data'],
    )
    f.write(record)
    return True
|
||||
|
||||
|
||||
def generate_error(f, err_code, err_text, err_data):
    """Assemble an error dict, append it to the open log *f* via
    save_log_string, and return the dict."""
    exc_data = {
        'err_code': err_code,
        'err_text': err_text,
        'err_data': err_data,
    }
    save_log_string(f, exc_data)
    return exc_data
|
||||
537
BaseModels/functions.py
Normal file
537
BaseModels/functions.py
Normal file
@@ -0,0 +1,537 @@
|
||||
## -*- coding: utf-8 -*-
|
||||
__author__ = 'SDE'
|
||||
|
||||
from django.utils.html import strip_tags
|
||||
# from uuslug import slugify
|
||||
import json
|
||||
import os.path
|
||||
from PIL import Image
|
||||
from django.core.files.uploadedfile import InMemoryUploadedFile
|
||||
from BaseModels.mailSender import techSendMail
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
def get_near_work_day(DT):
    """Return DT unchanged when it falls on a weekday, otherwise the
    following Monday."""
    weekday = DT.isoweekday()
    if weekday >= 6:  # Saturday or Sunday
        return DT + timedelta(days=8 - weekday)
    return DT
|
||||
|
||||
|
||||
def get_next_DT_for_monthes_delta_great(monthes_delta, fromDT=None):
    """Return the date in the month following *fromDT* with the same
    day-of-month, clamped down to the nearest existing day (e.g. Jan 31 ->
    Feb 28).

    Fix: the default was `fromDT=datetime.now()`, which Python evaluates once
    at import time, freezing the default at process start; it is now resolved
    at call time. The bare `except` was narrowed to ValueError.

    NOTE(review): monthes_delta is accepted but unused — the function always
    advances exactly one month; confirm against callers.
    """
    if fromDT is None:
        fromDT = datetime.now()

    DT = fromDT

    # Step forward a day at a time until the month rolls over (lands on day 1).
    cur_month = DT.month
    while cur_month == DT.month:
        DT = DT + timedelta(days=1)

    # Snap to the closest existing day at or below fromDT.day in the new month.
    i = 0
    while True:
        try:
            DT = DT.replace(day=fromDT.day - i)
            break
        except ValueError:
            i += 1

    return DT
|
||||
|
||||
|
||||
def get_prev_DT_for_monthes_delta_less(monthes_delta, fromDT=None):
    """Return the datetime *monthes_delta* months before *fromDT*, clamping the day.

    BUG FIX: the original default ``fromDT=datetime.now()`` was evaluated once
    at import time, freezing the default for the process lifetime; None is now
    the sentinel.
    """
    if fromDT is None:
        fromDT = datetime.now()

    # step back one month at a time: first-of-month minus one day lands in
    # the previous month
    DT = fromDT
    for _ in range(monthes_delta):
        DT = DT.replace(day=1) - timedelta(days=1)

    # pick the nearest existing day-of-month at or below fromDT.day
    day_shift = 0
    while True:
        try:
            return DT.replace(day=fromDT.day - day_shift)
        except ValueError:  # that day does not exist in the target month
            day_shift += 1
|
||||
|
||||
|
||||
def correct_filter_name_for_filter_and_create(filter_kwargs):
    """Derive two kwarg dicts from *filter_kwargs*.

    The filter dict gets a case-insensitive name lookup ('name' ->
    'name__iexact'); 'id' is dropped from both dicts so it can never be
    filtered or set explicitly. Returns (filter_Dict, create_Dict).
    """
    filter_Dict = dict(filter_kwargs)
    create_Dict = dict(filter_kwargs)

    if 'name' in filter_kwargs:
        filter_Dict['name__iexact'] = filter_Dict.pop('name')

    if 'id' in filter_kwargs:
        filter_Dict.pop('id')
        create_Dict.pop('id')

    return filter_Dict, create_Dict
|
||||
|
||||
|
||||
def date_range_as_Dict(start_date, end_date):
    """Return one single-key dict {date: {}} per day from start to end inclusive."""
    import datetime
    total_days = int((end_date - start_date).days) + 1
    return [{start_date + datetime.timedelta(offset): {}} for offset in range(total_days)]
|
||||
|
||||
|
||||
def sortByLength(inputStr):
    """Sort-key helper: the length of *inputStr*."""
    return len(inputStr)
|
||||
|
||||
|
||||
def add_domain(request, url, add_lang=False):
    """Prefix *url* with the request's domain, optionally inserting the language code."""
    domain = get_domain_by_request(request)
    if not add_lang:
        return '{0}{1}'.format(domain, url)
    cur_lang = get_cur_lang_by_request(request)
    return '{0}/{1}/{2}'.format(domain, cur_lang, url)
|
||||
|
||||
|
||||
def get_domain_by_request(request):
    """Return the 'domain' query parameter when present, else the configured default."""
    from project_sets import domain
    params = request.query_params
    if params and 'domain' in params:
        return params['domain']
    return domain
|
||||
|
||||
|
||||
def get_cur_lang_by_request(request):
    """Return the 'cur_lang' query parameter when present, else the configured default."""
    from project_sets import lang
    params = request.query_params
    if params and 'cur_lang' in params:
        return params['cur_lang']
    return lang
|
||||
|
||||
|
||||
def get_img_type_by_request(request):
    """Return the requested image type ('img_type' query param), defaulting to 'webp'."""
    params = request.query_params
    if params and 'img_type' in params:
        return params['img_type']
    return 'webp'
|
||||
|
||||
|
||||
def image_convert_to_png(photo_file, save_file_path=None):
    """Convert an uploaded image file to PNG.

    Returns an InMemoryUploadedFile carrying the PNG bytes, optionally also
    saving to *save_file_path*. On any failure mails tech support and returns
    {'error': msg}.
    """
    from io import BytesIO
    from PIL import Image as Img
    print('image_convert_to_png')

    try:
        # build the target filename with a .png extension
        fn_list = photo_file.name.split('.')
        if len(fn_list) > 1:
            fp = fn_list[0] + '.png'
        else:
            fp = photo_file.name + '.png'

        image = Img.open(photo_file)
        print('photo was uploaded')

        try:
            # BUG FIX: Image.convert() returns a NEW image; the original
            # discarded the result, so no RGB conversion actually happened.
            image = image.convert("RGB")
            print('photo was converted to RGB')
        except Exception:
            print('!!! fail convert photo to RGB')

        if save_file_path:
            image.save(save_file_path, format="PNG")
            print('photo was saved')

        fileBytes = BytesIO()
        image.save(fileBytes, format="PNG")
        print('photo was preparing for streaming')

        # NOTE(review): size is hard-coded to 1 — presumably ignored by the
        # downstream storage code; confirm.
        memoryFile = InMemoryUploadedFile(fileBytes, None, fp, 'image/png', 1, None)
        return memoryFile

    except Exception as e:
        msg = 'image_convert_to_png error={0}'.format(str(e))
        print(msg)
        techSendMail(msg, 'image_convert_to_png error')
        return {'error': msg}
|
||||
|
||||
|
||||
def image_convert_to_webP(photo_file, save_file_path=None):
    """Convert an uploaded image file to WebP.

    Returns an InMemoryUploadedFile carrying the WebP bytes, optionally also
    saving to *save_file_path*.
    """
    from io import BytesIO
    from PIL import Image as Img

    # build the target filename with a .webp extension
    fn_list = photo_file.name.split('.')
    if len(fn_list) > 1:
        webP_fp = fn_list[0] + '.webp'
    else:
        webP_fp = photo_file.name + '.webp'

    image = Img.open(photo_file)

    # BUG FIX: Image.convert() is not in-place; bind the converted copy
    # (the original discarded the result).
    image = image.convert("RGB")

    if save_file_path:
        image.save(save_file_path, format="WEBP")

    fileBytes = BytesIO()
    image.save(fileBytes, format="WEBP")
    # NOTE(review): size is hard-coded to 1 — presumably ignored downstream.
    memoryFile = InMemoryUploadedFile(fileBytes, None, webP_fp, 'image/webp', 1, None)

    return memoryFile
|
||||
|
||||
|
||||
def get_thumb_path(full_filepath, img_type):
    """Return the path of a 300x300 thumbnail for *full_filepath*, creating it on demand.

    When img_type == 'webp' the thumbnail is converted to WebP; otherwise the
    source format is kept. Returns None when the source file does not exist.
    The thumbnail lives next to the source as 'icon-<filename>'.
    """
    convert_to_webP = (img_type == 'webp')

    full_filepath = full_filepath.replace('\\', '/')

    if not os.path.exists(full_filepath):
        return None

    path_list = full_filepath.split('/')
    filename = path_list[-1]
    filepath = '/'.join(path_list[:-1])

    if convert_to_webP:
        # swap (or add) the extension for the thumbnail name
        fn_list = filename.split('.')
        if len(fn_list) > 1:
            filename = fn_list[0] + '.webp'
        else:
            filename = filename + '.webp'

    icon_path = '{0}/icon-{1}'.format(filepath, filename)

    if not os.path.exists(icon_path):
        size = (300, 300)
        img = Image.open(full_filepath)
        if convert_to_webP:
            # BUG FIX: Image.convert() returns a new image; the original
            # discarded the result, so no conversion happened.
            img = img.convert("RGB")
        img.thumbnail(size)
        if convert_to_webP:
            img.save(icon_path, 'WEBP')
        else:
            img.save(icon_path)

    return icon_path
|
||||
|
||||
|
||||
def get_filename_from_path(filepath, wo_ext=False):
    """Return the final component of *filepath* (handles '/' and '\\'), optionally without extension."""
    filename = filepath.split('/')[-1]
    filename = filename.split('\\')[-1]

    if filename and wo_ext:
        filename = filename.split('.')[0]

    return filename
|
||||
|
||||
|
||||
def get_free_filename(filename, filepath):
    """Return filepath+filename, suffixing '-N' until no readable file occupies the path."""
    from os import path, access, R_OK

    candidate = filepath + filename
    counter = 0
    while path.exists(candidate) and path.isfile(candidate) and access(candidate, R_OK):
        counter += 1
        candidate = filepath + filename + '-{0}'.format(str(counter))

    return candidate
|
||||
|
||||
|
||||
def url_translit(value):
    """Slugify *value*: transliterate Cyrillic, lower-case, and map every
    character outside [0-9a-z-] to '-', collapsing runs and trimming a
    trailing hyphen.

    BUG FIX: the original indexed value[len(value) - 1] unconditionally and
    raised IndexError when the cleaned value was empty.
    """
    value = translit(value).lower()

    allow_symbols = '0123456789abcdefghijklmnopqrstuvwxyz-'
    value = ''.join(ch if ch in allow_symbols else '-' for ch in value)

    while '--' in value:
        value = value.replace(u'--', u'-')

    if value.endswith('-'):
        value = value[:-1]

    return value
|
||||
|
||||
|
||||
def translit(locallangstring):
    """Transliterate Cyrillic characters in *locallangstring* to Latin.

    Characters without a mapping pass through unchanged; hard/soft signs are
    dropped and the numero sign becomes 'no'.
    """
    conversion = {
        u'\u0410': 'A', u'\u0430': 'a',
        u'\u0411': 'B', u'\u0431': 'b',
        u'\u0412': 'V', u'\u0432': 'v',
        u'\u0413': 'G', u'\u0433': 'g',
        u'\u0414': 'D', u'\u0434': 'd',
        u'\u0415': 'E', u'\u0435': 'e',
        u'\u0401': 'Yo', u'\u0451': 'yo',
        u'\u0416': 'Zh', u'\u0436': 'zh',
        u'\u0417': 'Z', u'\u0437': 'z',
        u'\u0418': 'I', u'\u0438': 'i',
        u'\u0419': 'Y', u'\u0439': 'y',
        u'\u041a': 'K', u'\u043a': 'k',
        u'\u041b': 'L', u'\u043b': 'l',
        u'\u041c': 'M', u'\u043c': 'm',
        u'\u041d': 'N', u'\u043d': 'n',
        u'\u041e': 'O', u'\u043e': 'o',
        u'\u041f': 'P', u'\u043f': 'p',
        u'\u0420': 'R', u'\u0440': 'r',
        u'\u0421': 'S', u'\u0441': 's',
        u'\u0422': 'T', u'\u0442': 't',
        u'\u0423': 'U', u'\u0443': 'u',
        u'\u0424': 'F', u'\u0444': 'f',
        u'\u0425': 'H', u'\u0445': 'h',
        u'\u0426': 'Ts', u'\u0446': 'ts',
        u'\u0427': 'Ch', u'\u0447': 'ch',
        u'\u0428': 'Sh', u'\u0448': 'sh',
        u'\u0429': 'Sch', u'\u0449': 'sch',
        u'\u042a': '', u'\u044a': '',
        u'\u042b': 'Y', u'\u044b': 'y',
        u'\u042c': '', u'\u044c': '',
        u'\u042d': 'E', u'\u044d': 'e',
        u'\u042e': 'Yu', u'\u044e': 'yu',
        u'\u042f': 'Ya', u'\u044f': 'ya',
        u'№': 'no',
    }
    return ''.join(conversion.get(ch, ch) for ch in locallangstring)
|
||||
|
||||
|
||||
def slugify_text(str_text):
    """Clean and transliterate *str_text* for use in a slug.

    Normalizes encoding best-effort, strips quote/bracket punctuation, then
    transliterates. Returns u'' when the result is shorter than 2 chars.
    BUG FIX: the original bound its result to the name ``str``, shadowing the
    builtin; local renamed. Bare excepts narrowed to Exception.
    """
    utf8_code = False
    try:
        str_text = str_text.encode('utf-8').decode('utf-8')
        utf8_code = True
    except Exception:
        pass

    if utf8_code == False:
        try:
            str_text = str_text.decode('utf-8')
        except Exception:
            pass

    str_text = del_bad_symbols(str_text)

    # strip punctuation in the same order as the original implementation
    for old, new in (
        (u'"', u''), (u"'", u''), (u".", u''), (u",", u''),
        (u" -", u'-'), (u"- ", u'-'),
        (u"„", u''), (u"(", u''), (u")", u''),
        (u"{", u''), (u"}", u''), (u"<", u''), (u">", u''),
    ):
        str_text = str_text.replace(old, new)

    res = translit(str_text)
    res = translit(res)
    # NOTE(review): this retry recomputes the exact same expression — it can
    # never change the outcome; kept for behavior parity, looks vestigial.
    if len(res) < 2 or len(res) + 3 < len(str_text):
        res = translit(str_text)
        res = translit(res)

    res = res.replace(u"'", u'')
    res = res.replace(u'"', u'')

    if len(res) < 2:
        res = u''
    return res
|
||||
|
||||
|
||||
def get_price_from_string_w_del_tails(string):
    """Strip currency markers from a price string so it can be parsed as a number.

    Removes spacing, '$', 'USD', 'Br' and 'руб.' suffixes and converts a
    decimal comma to a decimal point. Returns the cleaned string.
    """
    string = del_bad_symbols(string)

    # NOTE(review): the character removed below renders as a plain space in
    # this extraction, but was very likely a non-breaking space (\xa0) in the
    # original; as written it also deletes ordinary spaces. Verify against VCS.
    while string.find(' ') > -1:
        string = string.replace(' ', '')
    string = string.replace(u'$', '')
    string = string.replace(u'USD', '')
    string = string.replace(u'Br', '')
    string = string.replace(u'руб.', '')
    string = string.replace(u',', '.')  # decimal comma -> decimal point

    return string
|
||||
|
||||
|
||||
def kill_pretexts(txt):
    """Remove common Russian prepositions from a space-separated phrase."""
    pretexts = [
        'в', 'без', 'до', 'из', 'к', 'на', 'по', 'о', 'от', 'перед', 'при', 'через', 'с', 'у', 'за', 'над',
        'об', 'под', 'про', 'для'
    ]
    kept = [word for word in txt.split(' ') if word not in pretexts]
    return ' '.join(kept)
|
||||
|
||||
|
||||
def stay_only_text_and_numbers(txt):
    """Blank out punctuation, keeping '.'/',' only as decimal separators.

    A dot or comma survives only when flanked by digits on both sides; every
    character in *bad_symbols* becomes a space, and the result is stripped.
    """
    bad_symbols = '"~`{}[]|!@#$%^&*()_+№;:?= '
    nums = '0123456789'

    # first pass: blanket-replace unconditionally-bad characters with spaces
    for symbol in bad_symbols:
        txt = txt.replace(symbol, ' ')

    symbols_for_check = ',.'  # NOTE(review): assigned but never used
    i = 0
    while i < len(txt):
        if txt[i] in ['.', ',']:
            # keep the separator only when it sits between two digits
            # (start/end of string or a non-digit neighbour disqualifies it)
            if i < 1 or not txt[i - 1] in nums or i == len(txt) - 1 or not txt[i + 1] in nums:
                txt_list = list(txt)
                txt_list[i] = ' '
                txt = ''.join(txt_list)
        # if txt[i] in ['"']:
        #     if i < 1 or not txt[i - 1] in nums:
        #         txt_list = list(txt)
        #         txt_list[i] = ' '
        #         txt = ''.join(txt_list)

        i += 1

    txt = txt.strip()
    # NOTE(review): this loop renders as replacing a space with a space, which
    # would never terminate once entered; the original almost certainly
    # collapsed double spaces ('  ' -> ' ') and was garbled by extraction.
    # Verify against VCS before relying on this function.
    while ' ' in txt:
        txt = txt.replace(' ', ' ')

    return txt
|
||||
|
||||
|
||||
def del_bad_symbols_and_enters_and_tags(string):
    """Strip HTML tags and CRLF pairs from *string*, then normalize whitespace.

    Best-effort: any failure returns whatever was produced so far.
    """
    try:
        string = strip_tags(string)
        string = string.replace('\r\n', '')
        # BUG FIX: the original called del_bad_symbols(string) and discarded
        # the return value, so the whitespace normalization never applied.
        string = del_bad_symbols(string)
    except Exception:
        pass

    return string
|
||||
|
||||
|
||||
def del_bad_symbols(string):
    """Normalize whitespace in *string*: strip the ends and collapse space runs.

    Best-effort: any failure (e.g. a non-string argument) returns the input
    unchanged.
    """
    try:
        string = string.strip()

        # NOTE(review): both loops below render as replacing a space with a
        # space, which would never terminate once entered; the originals were
        # almost certainly replacing a non-breaking space (\xa0) and a double
        # space respectively, and were garbled by extraction. Verify against
        # VCS before relying on this function.
        while string.find(' ') > -1:
            string = string.replace(' ', ' ')

        while string.find(' ') > -1:
            string = string.replace(' ', ' ')
    except:
        pass

    return string
|
||||
|
||||
|
||||
# def get_offers_from_cookie(request):
|
||||
# if 'oknaplast_right_offers' in request.COOKIES:
|
||||
# order_list = json.loads(request.COOKIES['oknaplast_right_offers'], encoding='utf8')
|
||||
# return WindowOfferModel.objects.filter(id__in=order_list)
|
||||
# else:
|
||||
# return []
|
||||
|
||||
|
||||
def del_nbsp(string):
    """Decode a fixed set of HTML entities in *string* and normalize control chars.

    NOTE(review): in the extracted source the entity names (first tuple
    elements) had already been rendered to their target characters, leaving
    no-op pairs and broken syntax; they are reconstructed here 1:1 from the
    target characters and should be verified against VCS history.
    """
    mapping = [
        ('&quot;', u'"'),
        ('&amp;', u'&'),
        ('&lt;', u'<'),
        ('&gt;', u'>'),
        ('&nbsp;', u' '),
        ('&iexcl;', u'¡'),
        ('&cent;', u'¢'),
        ('&pound;', u'£'),
        ('&curren;', u'¤'),
        ('&yen;', u'¥'),
        ('&brvbar;', u'¦'),
        ('&sect;', u'§'),
        ('&uml;', u'¨'),
        ('&copy;', u'©'),
        ('&ordf;', u'ª'),
        ('&laquo;', u'«'),
        ('&not;', u'¬'),
        ('&reg;', u'®'),
        ('&macr;', u'¯'),
        ('&deg;', u'°'),
        ('&plusmn;', u'±'),
        ('&sup2;', u'²'),
        ('&sup3;', u'³'),
        ('&acute;', u'´'),
        ('&micro;', u'µ'),
        ('&para;', u'¶'),
        ('&middot;', u'•'),  # original maps the middot entity to a bullet
        ('&cedil;', u'¸'),
        ('&sup1;', u'¹'),
        ('&ordm;', u'º'),
        ('&raquo;', u'»'),
        ('&frac14;', u'¼'),
        ('&frac12;', u'½'),
        ('&frac34;', u'¾'),
        ('&euro;', u'€'),
        ('\n', ''),
        ('\r', ''),
        ('\t', ' '),
        ('—', '-'),  # literal em dash -> hyphen (not an entity in the source)
    ]
    for pair in mapping:
        string = string.replace(pair[0], pair[1])
    return string
|
||||
195
BaseModels/inter.py
Normal file
195
BaseModels/inter.py
Normal file
@@ -0,0 +1,195 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from django.http import HttpResponse
|
||||
import json
|
||||
import csv
|
||||
from .mailSender import techSendMail
|
||||
|
||||
import re
|
||||
|
||||
numbers = '0123456789.,'
|
||||
|
||||
|
||||
def get_fieldsNames_of_model(model):
    """Return the names of all fields declared on the Django *model*."""
    return [field.name for field in model._meta.get_fields()]
|
||||
|
||||
|
||||
def get_unique_url(model, name, url=None):
    """Return *url* (or a slug of *name*) made unique among the model's URLs.

    While the candidate collides, a numeric suffix is appended (note: suffixes
    compound, e.g. 'x' -> 'x-1' -> 'x-1-2', matching historical behavior).
    """
    from .functions import url_translit

    if not url:
        url = url_translit(name)

    try:
        model.objects.get(url=url)
    except model.DoesNotExist:
        # no collision at all — fast path
        return url

    taken = model.objects.all().values_list('url', flat=True)
    suffix = 1
    while url in taken:
        url = f'{url}-{suffix}'
        suffix += 1

    return url
|
||||
|
||||
|
||||
def dicts_join(dict1, dict2, inplace=False):
    """Merge *dict2* into *dict1*; mutate *dict1* when *inplace*, else merge a copy."""
    target = dict1 if inplace else dict(dict1)
    target.update(dict2)
    return target
|
||||
|
||||
|
||||
def set_ru_locale():
    """Switch the process locale to Russian UTF-8; mail tech support on failure.

    Returns True on success, False otherwise.
    """
    import locale

    try:
        locale.setlocale(locale.LC_ALL, 'ru_RU.utf8')
        return True
    except Exception as e:
        msg = '<b style="color : red;">!!!!! --- set_ru_locale exception error={0}<br>{1}</b>'.format(
            str(e),
            str(e.args)
        )
        print(msg)
        techSendMail(msg, 'set_ru_locale')
        return False
|
||||
|
||||
|
||||
def get_all_videos_from_html_content(html):
    """Extract the src URLs of <iframe> embeds from *html*; None for empty input."""
    if not html:
        return None
    return re.findall('iframe.*src=\"(.+?)\"', html)
|
||||
|
||||
|
||||
def get_all_photos_from_html_content(html):
    """Return every src="..." attribute value found in *html*."""
    return re.findall('src=\"(.+?)\"', html)
|
||||
|
||||
|
||||
def get_choices_value_by_choices_id(choices, id):
    """Return the display value paired with *id* in a Django-style choices sequence, or None."""
    return next((value for choice_id, value in choices if choice_id == id), None)
|
||||
|
||||
|
||||
def sortByLength(inputStr):
    """Sort-key helper: the length of *inputStr* (duplicate of functions.sortByLength)."""
    return len(inputStr)
|
||||
|
||||
|
||||
def get_current_language(request):
    """Return the language code that Django's locale middleware attached to *request*."""
    return request.LANGUAGE_CODE
|
||||
|
||||
|
||||
def cut_to_number_w_point(string):
    """Extract a decimal number (e.g. '12.5') from *string*.

    A decimal comma is treated as a point. If no 'digits.digits' pattern is
    found, falls back to concatenating all digit runs. Falsy input is
    returned unchanged.
    BUG FIX: the original pattern '[0-9]+.[0-9]+' left the dot unescaped, so
    ANY character between digit groups matched (e.g. '12x34' -> '12x34').
    """
    import re

    if not string:
        return string

    string = string.replace(',', '.')

    try:
        number = u''.join(re.findall(r'[0-9]+\.[0-9]+', string))
        if number == u'':
            # no decimal number present — keep just the digits
            number = u''.join(re.findall(r'[0-9]+', string))
    except Exception:
        number = None
    return number
|
||||
|
||||
|
||||
def cut_to_number(string):
    """Return only the digit characters of *string*, concatenated.

    Falsy input is returned unchanged.
    FIX: the original pattern '[\\d]+' was a non-raw literal with an invalid
    escape sequence (DeprecationWarning, SyntaxWarning on 3.12+); now raw.
    """
    import re

    if not string:
        return string

    return ''.join(re.findall(r'\d+', string))
|
||||
|
||||
|
||||
def range_dates(start, end):
    """Return every date from *start* to *end* inclusive as a list.

    FIX: the original bound its result to the name ``list``, shadowing the
    builtin for the rest of the function.
    """
    from datetime import timedelta

    return [start + timedelta(days=offset) for offset in range(0, (end - start).days + 1)]
||||
|
||||
|
||||
# разбираем csv строку, получаем Dict
|
||||
# Parse a csv-ish string into a Dict
def get_Dict_from_csv_data(csv_data):
    """Parse 'key:value;key:value;...' into a dict; malformed items are skipped."""
    data = {}
    for chunk in csv_data.split(';'):
        if not chunk:
            continue
        try:
            # at most one key and one value per chunk; extra ':' parts dropped
            data.update(dict([chunk.split(':')[0:2]]))
        except ValueError:
            continue

    return data
|
||||
|
||||
|
||||
def cut_url_toPageName(url):
    """Return the last path segment of *url* (the page name)."""
    return url.split('/')[-1]
|
||||
|
||||
|
||||
def jsonify():
    """Decorator factory: serialize the wrapped view's return value as a JSON response.

    BUG FIX: HttpResponse dropped the ``mimetype`` keyword in Django 1.7;
    ``content_type`` is the supported spelling — the original raised TypeError
    on any modern Django.
    """
    def decorator(func):
        def wrapper(request, *args, **kwargs):
            result = func(request, *args, **kwargs)
            return HttpResponse(json.dumps(result), content_type='application/json')

        return wrapper

    return decorator
|
||||
|
||||
|
||||
def check_perms_for_view_order(request, order):
    """Permission predicate for viewing *order*, shaped as a decorator factory.

    Grants access when the user is the order's owner/recipient, holds the
    global 'can_see_orders_all_companys' permission, or shares the order's
    group and holds 'can_see_orders_self_company'.

    NOTE(review): the inner wrapper never calls the wrapped ``func`` and can
    fall through returning None instead of False; the decorator machinery
    looks unfinished — confirm intended usage before relying on it.
    """
    def decorator(func):
        def wrapper(request, *args, **kwargs):
            c_user = request.user
            if order:
                # owner or designated recipient may always view
                if c_user == order.user or c_user == order.forUser:
                    return True
                else:
                    # global permission beats company-scoped permission
                    if c_user.has_perm('OrdersApp.can_see_orders_all_companys'):
                        return True
                    else:
                        if order.group == c_user.group and c_user.has_perm('OrdersApp.can_see_orders_self_company'):
                            return True

        return wrapper

    return decorator
|
||||
31
BaseModels/json_funcs.py
Normal file
31
BaseModels/json_funcs.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import json
|
||||
|
||||
|
||||
def del_from_txt_bad_json_symbols(txt):
    """Repeatedly delete the character json.loads chokes on until *txt* parses.

    Returns (cleaned_txt, html_log) where the log records each removed
    character. An already-valid document is returned untouched with an
    empty log.
    """
    log = ''
    while len(txt) > 0:
        try:
            json.loads(txt)
            break  # parses cleanly — done
        except json.JSONDecodeError as exc:
            msg = '- длина контента = {2} - {1} - удален символ {0}'.format(
                txt[exc.pos],
                str(exc),
                str(len(txt) - 1)
            )
            log = '{0}<br>{1}'.format(log, msg)
            print(msg)
            # drop the offending character and try again
            txt = txt[:exc.pos] + txt[exc.pos + 1:]

    return txt, log
|
||||
103
BaseModels/log/log_funcs.py
Normal file
103
BaseModels/log/log_funcs.py
Normal file
@@ -0,0 +1,103 @@
|
||||
from datetime import datetime, date
|
||||
from django.db.models.fields.files import ImageFieldFile
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
def send_mail_alert_w_data(obj, data, user=None):
    """Audit hook for object changes; the mail-alert body is currently disabled.

    Kept as a no-op so call sites stay valid. Always returns True.
    (Previous checkpoint-alert implementation is available in VCS history.)
    """
    return True
|
||||
|
||||
|
||||
|
||||
def get_normalized_data(data):
    """Recursively make *data* JSON-serializable, mutating containers in place.

    OrderedDicts become plain dicts, Django's '_state' bookkeeping key is
    dropped, and datetime/date/ImageFieldFile values are stringified one
    nesting level at a time. The (possibly rebound) value is also returned.
    """

    if type(data) == OrderedDict:
        data = dict(data)

    # NOTE(review): for a list argument this tests list membership of the
    # literal string '_state', not a dict key — presumably only dicts ever
    # reach here with that key; confirm against callers.
    if '_state' in data:
        del data['_state']

    if type(data) == dict:
        for key, val in data.items():
            # recurse into nested containers first, then stringify leaves
            if type(data[key]) in (dict, list, OrderedDict):
                data[key] = get_normalized_data(val)
            if type(data[key]) in (datetime, date, ImageFieldFile):
                data[key] = str(val)

    if type(data) == list:
        i = 0
        while i < len(data):
            item = data[i]

            if type(item) in (dict, list, OrderedDict):
                data[i] = get_normalized_data(item)

            # second pass over the (pre-recursion) item reference: stringify
            # one more level of leaf values
            if type(item) == dict:
                for key, val in item.items():
                    if type(item[key]) in (datetime, date, ImageFieldFile):
                        item[key] = str(val)

            elif type(item) == list:
                ei = 0
                while ei < len(item):
                    if type(item[ei]) in (datetime, date, ImageFieldFile):
                        item[ei] = str(item[ei])

                    ei += 1

            i += 1

    return data
|
||||
|
||||
|
||||
|
||||
def prepare_data_for_json(data):
    """Normalize *data* (dicts/lists/OrderedDicts, datetimes, image fields) for json.dumps.

    Thin wrapper kept for backward compatibility; the recursion lives in
    get_normalized_data.
    """
    return get_normalized_data(data)
|
||||
259
BaseModels/logging_change_data.py
Normal file
259
BaseModels/logging_change_data.py
Normal file
@@ -0,0 +1,259 @@
|
||||
from .mailSender import techSendMail
|
||||
|
||||
|
||||
|
||||
# def get_order_changes_list_for_template(order):
|
||||
#
|
||||
# if order.json_data and 'data_log' in order.json_data:
|
||||
#
|
||||
# i = 0
|
||||
# changes_log = []
|
||||
#
|
||||
# if len(order.json_data['data_log']) > 0:
|
||||
# first_log_record = order.json_data['data_log'][0]
|
||||
#
|
||||
# while i<len(order.json_data['data_log'])-1:
|
||||
# dict1 = order.json_data['data_log'][i]['order_data']
|
||||
# dict2 = order.json_data['data_log'][i+1]['order_data']
|
||||
#
|
||||
# res = {'order' : dict_compare(dict1, dict2)}
|
||||
# rec_DT = res['order']['modified']['now_DT'][1]
|
||||
# del res['order']['modified']['now_DT']
|
||||
#
|
||||
# # for item in res['order'].values():
|
||||
# # if type(item) == set:
|
||||
# # item = list(item)
|
||||
#
|
||||
# # dishes_set1 = order.json_data['data_log'][i]['dishes_data'])
|
||||
# # dishes_set2 = set(order.json_data['data_log'][i+1]['dishes_data'])
|
||||
# # res = dishes_set1.symmetric_difference(dishes_set2)
|
||||
# # dict1 = {'dishes': order.json_data['data_log'][i]['dishes_data']}
|
||||
# # dict2 = {'dishes': order.json_data['data_log'][i + 1]['dishes_data']}
|
||||
# list1 = []
|
||||
# list2 = []
|
||||
# list1.extend(order.json_data['data_log'][i]['dishes_data'])
|
||||
# list2.extend(order.json_data['data_log'][i + 1]['dishes_data'])
|
||||
# # res['modified'].
|
||||
# res.update({'dishes' : list_compare(list1, list2)})
|
||||
# changes_log.append({rec_DT: res})
|
||||
#
|
||||
# i += 1
|
||||
#
|
||||
# return {
|
||||
# 'changes_log' : changes_log,
|
||||
# 'first_log_record' : first_log_record
|
||||
# }
|
||||
#
|
||||
# return {}
|
||||
|
||||
|
||||
|
||||
def get_changes_for_Dicts(old_Dict, newDict):
    """Diff two dicts via dict_compare.

    Returns (diff, required_save) where required_save is True when anything
    was added, removed, or modified. Failures are printed, mailed to tech
    support, and yield ({}, False).
    """
    res = {}
    required_save = False

    try:
        res = dict_compare(old_Dict, newDict)
        has_changes = bool(res) and bool(res['modified'] or res['added'] or res['removed'])
        if has_changes:
            required_save = True

    except Exception as e:
        msg = 'get_changes_for_Dicts Error = {0}<br>{1}'.format(str(e), str(e.args))
        print(msg)
        techSendMail(msg)

    return res, required_save
|
||||
|
||||
|
||||
|
||||
|
||||
def list_compare(old_data_list, new_data_list):
    """Diff two lists of records; return {'added','removed','modified'} or {}.

    Items that are dicts carrying an 'id' key are matched by id and diffed
    with dict_compare; items without 'id' are compared positionally. Works on
    shallow copies, so the input lists themselves are not mutated.

    NOTE(review): the control flow below is index- and deletion-sensitive;
    behavior is documented as observed in the code, not as specified.
    """

    res = {
        'added': [],
        'removed': [],
        'modified': [],
    }

    # shallow copies: matched items are deleted from both as they are consumed
    old_list = []
    old_list.extend(old_data_list)
    new_list = []
    new_list.extend(new_data_list)

    if new_list == old_list:
        return {}

    i2 = 0
    list_w_id = False  # set once any new item carries an 'id' key
    while i2 < len(new_list):
        req_del = False
        i1 = 0

        if 'id' in new_list[i2]:
            list_w_id = True
            while i1 < len(old_list):
                if old_list[i1] == new_list[i2]:
                    # identical record: consume silently, no diff entry
                    req_del = True
                else:

                    if old_list[i1]['id'] == new_list[i2]['id']:
                        # same id, different content: record a field-level diff
                        if type(old_list[i1]) == dict:
                            res_dict_compare = dict_compare(old_list[i1], new_list[i2])
                            if 'property' in new_list[i2]:
                                res['modified'].append({new_list[i2]['property']['name']: res_dict_compare})
                            else:
                                res['modified'].append({new_list[i2]['id']: res_dict_compare})
                        elif type(old_list[i1]) in [tuple, list]:
                            res_list_compare = list_compare(old_list[i1], new_list[i2])
                            res['modified'].append(res_list_compare)
                        else:
                            res['modified'].append(new_list[i2])

                        req_del = True

                if req_del:
                    # consume the matched pair and re-examine the same i2 slot
                    del old_list[i1]
                    del new_list[i2]
                    break
                else:
                    i1 += 1
        else:
            # no 'id' key: positional comparison
            if not new_list[i2] in old_list:
                if i2 < len(old_list):
                    res['modified'].append([old_list[i2], new_list[i2]])
                    del old_list[i2]
                else:
                    res['modified'].append(new_list[i2])
                del new_list[i2]
                continue
            else:
                i2 += 1

                req_del = True

        # an id-bearing item with no counterpart in old_list was added
        if not req_del:
            res['added'].append(new_list[i2])
            del new_list[i2]

    # for id-matched lists, anything left on the old side was removed
    if list_w_id and old_list:
        res['removed'].extend(old_list)

    if res['added'] or res['modified'] or res['removed']:
        return res

    return {}
|
||||
|
||||
|
||||
|
||||
def dict_compare(old_d2, new_d1):
    """Recursively diff two dicts; return {'added','removed','modified'} or {}.

    In the normal path 'added'/'removed' are key sets and 'modified' maps
    keys to (old, new) pairs or nested diffs; when one side is falsy they
    are one-element lists describing the whole transition (so the result's
    value types vary by path — callers must tolerate both). Errors are
    printed, mailed to tech support, and yield {}.
    """

    added = []
    removed = []
    modified = []

    if not old_d2 and not new_d1:
        return {}

    try:
        if not old_d2 and new_d1:
            # everything on the new side appeared at once
            added.append('None > ' + str(new_d1))
        elif not new_d1 and old_d2:
            # everything on the old side vanished at once
            removed.append(str(old_d2) + ' > None')
        else:
            d1_keys = set(new_d1.keys())
            d2_keys = set(old_d2.keys())
            intersect_keys = d1_keys.intersection(d2_keys)
            added = d1_keys - d2_keys
            removed = d2_keys - d1_keys
            modified = {}
            for o in intersect_keys:
                if new_d1[o] != old_d2[o]:
                    # recurse per value type; scalars become (old, new) pairs
                    if type(new_d1[o]) == dict:
                        modified.update({
                            o: dict_compare(old_d2[o], new_d1[o])
                        })
                    elif type(new_d1[o]) in [list, tuple]:
                        modified.update({
                            o: list_compare(old_d2[o], new_d1[o])
                        })
                    else:
                        modified.update({
                            o: (old_d2[o], new_d1[o])
                        })
            # NOTE(review): 'same' is computed but never used
            same = set(o for o in intersect_keys if new_d1[o] == old_d2[o])

        if added or removed or modified:
            return {
                'added': added,
                'removed': removed,
                'modified': modified,
            }

    except Exception as e:
        msg = 'dict_compare Error = {0}<br>{1}<br>{2}<br>{3}'.format(
            str(e),
            str(e.args),
            old_d2,
            new_d1
        )
        print(msg)
        techSendMail(msg)

    return {}
|
||||
354
BaseModels/mailSender.py
Normal file
354
BaseModels/mailSender.py
Normal file
@@ -0,0 +1,354 @@
|
||||
## -*- coding: utf-8 -*-
|
||||
|
||||
__author__ = 'SDE'
|
||||
|
||||
from django.core.mail import EmailMultiAlternatives
|
||||
# from AuthApp.models import UserProfileModel
|
||||
import smtplib
|
||||
# from tEDataProj.settings import prod_server
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from email.mime.application import MIMEApplication
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from os.path import basename
|
||||
from email.mime.base import MIMEBase
|
||||
from email import encoders
|
||||
import ssl
|
||||
import time
|
||||
import random
|
||||
# from tEDataProj import settings
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
# tech@truenergy.by
|
||||
# k7n2d3ZFZo4@CU5$4YDk
|
||||
|
||||
# administrator@truenergy.by
|
||||
# 6&#WfW8$qR2w8uv69e5$
|
||||
|
||||
|
||||
# def fix_mailing_links_in_mail(html):
|
||||
# from GeneralApp.views import get_cur_domain
|
||||
# serv_domain, client_domain = get_cur_domain()
|
||||
#
|
||||
# while 'src="/media/' in html:
|
||||
# html = html.replace('src="/media/', f'src="{serv_domain}media/')
|
||||
#
|
||||
# return html
|
||||
|
||||
|
||||
def mailing_direct_by_maillist(subject, from_email, to, html_content, attachments=None):
    """Send the message to each address in *to* one at a time; return an HTML log.

    A random 1-5s pause is inserted between sends (rate limiting / spam
    avoidance).
    """
    results = []
    for email in to:
        res = admin_send_mail_by_SMTPlib(subject, from_email, email, html_content, attachments)
        results.append(str(res))
        time.sleep(random.randint(1, 5))

    # reproduce the historical leading-<br> log format
    return '<br>'.join([''] + results)
|
||||
|
||||
|
||||
def prepare_attach_file(filepath, filename=None):
    """Read *filepath* and wrap it as a MIMEApplication attachment part.

    If *filename* is omitted it is taken from the path. Paths not already
    under MEDIA_ROOT are resolved relative to it. On failure a tech mail is
    sent and the error string is returned instead of a MIME part.
    """
    try:
        if not filename:
            filename = basename(filepath)

        # resolve relative media paths against MEDIA_ROOT
        if settings.MEDIA_ROOT not in filepath:
            filepath = f'{settings.MEDIA_ROOT}{filepath}'

        with open(filepath, "rb") as src:
            part = MIMEApplication(src.read(), Name=filename)

        # header can be added after the file handle is closed
        part['Content-Disposition'] = 'attachment; filename="%s"' % filename

    except Exception as e:
        msg = f'prepare_attach_file Error = {str(e)}'
        techSendMail(msg, title='prepare_attach_file')
        return msg

    return part
|
||||
|
||||
|
||||
def prepare_xls_attach_by_xls_virtual_file(virtual_file, filename):
    """Wrap in-memory workbook bytes as a base64-encoded MIME attachment part."""
    maintype, subtype = 'application/octet-stream'.split('/', 1)

    part = MIMEBase(maintype, subtype)
    part.set_payload(virtual_file)
    # payload must travel as base64 over SMTP
    encoders.encode_base64(part)
    part.add_header('Content-Disposition', 'attachment', filename=filename)

    return part
|
||||
|
||||
|
||||
def admin_send_mail_by_SMTPlib(subject, from_email, to, html_content, attachments=None):
    """Send mail through the company SMTP relay with a fallback account.

    Tries the primary account (support@, port 587) first; if that raises,
    retries via the tech@ account on port 25 — note the fallback forces the
    sender to the login address, ignoring *from_email*.
    Returns str() of the underlying send result ('None' on total failure).

    SECURITY NOTE(review): credentials are hard-coded here — they should be
    moved to settings/environment and rotated.
    """
    res = None
    try:
        try:
            # primary relay account
            res = send_mail_by_SMTPlib(
                subject, from_email, to, html_content,
                'mail.truenergy.by', 587,
                'support@truenergy.by', 'eg4$#95Xp0T*V%ig5BbR',
                attachments,
            )
        except:
            # fallback relay account; sender forced to the login address
            fallback_login = 'tech@truenergy.by'
            res = send_mail_by_SMTPlib(
                subject, fallback_login, to, html_content,
                'mail.truenergy.by', 25,
                fallback_login, 'PowH@aL0a4%$iz0Uo5V$',
                attachments,
            )
    except Exception as e:
        msg = 'admin_send_mail_by_SMTPlib error = {0}'.format(str(e))
        print(msg)
    return str(res)
|
||||
|
||||
|
||||
def send_mail_by_SMTPlib(subject, from_email, to_init, html_content, smtp_server, smtp_port, smtp_login, smtp_password,
                         attachments=None):
    """Deliver an HTML mail over an explicit SMTP server/account.

    On non-production servers every recipient is redirected to
    'web@syncsystems.net'. The address 'support@truenergy.by' is always
    filtered out of the recipient set. Returns a human-readable status
    string (also printed); on failure the error string is returned instead
    of raising.
    """
    if not settings.prod_server:
        # never mail real users from dev/test environments
        to = 'web@syncsystems.net'
    else:
        to = to_init

    try:
        from settings_local import DEBUG
    except:
        print('get settings_local fail')

    res = None
    mail_lib = None

    time.sleep(1)  # crude rate limit between consecutive sends

    try:
        mail_lib = smtplib.SMTP(smtp_server, smtp_port)
        mail_lib.ehlo()
        mail_lib.starttls()
        mail_lib.ehlo()

        # BUGFIX: the original did `res = mail_lib.set_debuglevel = 2`, which
        # *replaced* the set_debuglevel method with the int 2 — debug output
        # was never actually enabled and the object was left broken. The
        # statement is removed; call mail_lib.set_debuglevel(1) here if
        # protocol tracing is ever needed.

        # restrict advertised auth mechanisms to ones the relay accepts
        mail_lib.esmtp_features['auth'] = 'LOGIN PLAIN'
        mail_lib.login(smtp_login, smtp_password)

        if type(to) in (list, tuple):
            if 'support@truenergy.by' in to:
                to.remove('support@truenergy.by')
            if not to:
                # BUGFIX: filtering could leave an empty list, which previously
                # raised IndexError on to[0]; nothing to send in that case.
                return None
            if len(to) > 1:
                to_str = u', '.join(to)
            else:
                to_str = to[0]
        else:
            if to == 'support@truenergy.by':
                return None
            to_str = to
            to = [to_str]

        # best-effort normalization of non-str subjects (e.g. bytes)
        if type(subject) != str:
            try:
                subject = subject.decode('utf-8')
            except:
                try:
                    subject = subject.encode('utf-8')
                except:
                    pass

        msg = MIMEMultipart()
        msg['From'] = from_email
        msg['Reply-To'] = from_email
        msg['To'] = to_str
        msg['Subject'] = subject
        msg.attach(MIMEText(html_content, 'html', 'utf-8'))

        if attachments:
            if type(attachments) in (list, tuple):
                try:
                    for item in attachments:
                        msg.attach(item)
                except:
                    msg.attach(attachments)
            else:
                msg.attach(attachments)

        res = mail_lib.sendmail(from_email, to, msg.as_string())
        mail_lib.quit()

    except Exception as e:
        msg = 'send_mail_by_SMTPlib error = {0}'.format(str(e))
        print(msg)
        try:
            mail_lib.quit()
        except:
            pass
        try:
            # dump connection state to help diagnose relay problems
            print(str(mail_lib.__dict__))
        except:
            pass
        return msg

    msg = 'send_mail_by_SMTPlib subj={3} init_to={2} to={0} res={1}'.format(str(to), str(res), str(to_init),
                                                                            str(subject))
    print(msg)
    return msg
|
||||
|
||||
|
||||
def sendMail(subject, text_content, from_email, to, html_content):
    """Send a single mail to *to*; *text_content* is accepted but unused.

    Always returns u'Accept' regardless of the underlying send result.
    """
    print('sendMail to {0}'.format(str(to)))

    admin_send_mail_by_SMTPlib(subject, from_email, [to], html_content)

    print(u'Accept')
    return u'Accept'
|
||||
|
||||
|
||||
# def techSendMail_for_top_management(html_content, title=None):
|
||||
# try:
|
||||
#
|
||||
# # if not prod_server:
|
||||
# # msg = '{0}. Not sended because is local'.format(html_content)
|
||||
# # print(msg)
|
||||
# # return msg
|
||||
# from AuthApp.models import User
|
||||
# from django.db.models import Q
|
||||
#
|
||||
# # to = ['web@syncsystems.net']
|
||||
# to = User.objects.filter(
|
||||
# Q(is_superuser=True) | Q(groups__name__in=[
|
||||
# 'Отдел продаж: Начальник отдела продаж', 'Управляющий',
|
||||
# 'Бухгалтерия: Главный бухгалтер'
|
||||
# ]),
|
||||
# is_active=True,
|
||||
# is_staff=True
|
||||
# ).values_list('email', flat=True)
|
||||
# to = list(to)
|
||||
# to.append('office@truenergy.by')
|
||||
#
|
||||
# print('techSendMail_for_top_management')
|
||||
# if title:
|
||||
# subject = title
|
||||
# else:
|
||||
# subject = u'truEnergy Data техническое оповещение'
|
||||
# from_email = 'support@truenergy.by'
|
||||
#
|
||||
# res = admin_send_mail_by_SMTPlib(subject, from_email, to, html_content)
|
||||
#
|
||||
# # msg = EmailMultiAlternatives(subject, text_content, from_email, to)
|
||||
# # msg.attach_alternative(html_content, "text/html")
|
||||
# # msg.send()
|
||||
# print(res)
|
||||
# return u'Accept'
|
||||
#
|
||||
# except Exception as e:
|
||||
# msg = 'techSendMail_for_top_management error={0}'.format(str(e))
|
||||
# techSendMail(msg)
|
||||
# print(msg)
|
||||
#
|
||||
# return 'Fail'
|
||||
|
||||
|
||||
def techSendMail_for_specified_email_list(html_content, email_list, title=None):
    """Send a technical notification to an explicit address list.

    Returns u'Accept' on success, 'Fail' (after reporting via techSendMail)
    on any exception.
    """
    try:
        print('techSendMail_for_specified_email_list')

        subject = title if title else u'truEnergy Data техническое оповещение'
        from_email = 'support@truenergy.by'

        res = admin_send_mail_by_SMTPlib(subject, from_email, email_list, html_content)
        print(res)
        return u'Accept'

    except Exception as e:
        msg = 'techSendMail_for_specified_email_list error={0}'.format(str(e))
        techSendMail(msg)
        print(msg)
        return 'Fail'
|
||||
|
||||
|
||||
def techSendMail(html_content, title=None, add_emails=None):
    """Send a technical alert to the developers' mailbox.

    *add_emails* extends the default recipient list. Always returns
    u'Accept'; failures are only printed.
    """
    print('techSendMail')

    try:
        from_email = 'support@truenergy.by'

        to = ['web@syncsystems.net']
        if add_emails:
            to.extend(add_emails)

        subject = title if title else u'truEnergy Data техническое оповещение'

        res = admin_send_mail_by_SMTPlib(subject, from_email, to, html_content)
        print(res)

    except Exception as e:
        print('techSendMail error={0}'.format(str(e)))

    return u'Accept'
|
||||
163
BaseModels/messages.py
Normal file
163
BaseModels/messages.py
Normal file
@@ -0,0 +1,163 @@
|
||||
## -*- coding: utf-8 -*-
|
||||
__author__ = 'SDE'
|
||||
# from Baldenini_site.inter import jsonify
|
||||
|
||||
def get_error_message_Dict(show_icon=None):
    """Base template context for a red (error) message box."""
    print('get_error_message_Dict')
    ctx = {
        'form_style': u'border-color: #FFBBBB; background-color: #FFEAEA;',
    }
    if show_icon:
        ctx['form_icon'] = 'canceled.png'
    return ctx
|
||||
|
||||
|
||||
def get_good_message_Dict(show_icon=None):
    """Base template context for a green (success) message box."""
    ctx = {
        'form_style': u'border-color: #BBFFBB; background-color: #EAFFEA;',
    }
    if show_icon:
        ctx['form_icon'] = 'accepted.png'
    return ctx
|
||||
|
||||
|
||||
def get_return_to_ready_but():
    """Context fragment: a single 'done' close-button."""
    return {'buttons': u'<div class="button close">ГОТОВО</div>'}
|
||||
|
||||
|
||||
def get_return_to_choice_buts(but_ok_name, but_cancel_name):
    """Context fragment: an OK + cancel button pair with the given captions."""
    buttons = (
        u'<div class="button ok">{0}</div>'
        u'<div class="button cancel">{1}</div>'
    ).format(but_ok_name, but_cancel_name)
    return {'buttons': buttons}
|
||||
|
||||
|
||||
def get_error_message(caption, text, show_icon=None):
    """Full template context for an error dialog with a 'done' button."""
    ctx = {'message': text, 'caption': caption}
    ctx.update(get_error_message_Dict(show_icon))
    ctx.update(get_return_to_ready_but())
    return ctx
|
||||
|
||||
|
||||
# @jsonify()
|
||||
# @jsonify()
def show_error_message(caption, text, show_icon=None):
    """Render an error dialog and return it as an AJAX payload."""
    from django.template.loader import render_to_string

    html = render_to_string('m_show_message.html', get_error_message(caption, text, show_icon))
    return {'error': 'error', 'html': html}
|
||||
|
||||
# @jsonify()
|
||||
# @jsonify()
def show_good_message(caption, text):
    """Render a success dialog and return it as an AJAX payload."""
    from django.template.loader import render_to_string

    html = render_to_string('m_show_message.html', get_good_message(caption, text))
    return {'html': html}
|
||||
|
||||
|
||||
def show_good_message_ok_go_to_blank_page(caption, text, button_caption, url):
    """Render a success dialog whose button opens *url* in a new tab."""
    from django.template.loader import render_to_string

    ctx = get_good_message_ok_go_to_blank_page(caption, text, button_caption, url)
    return {'html': render_to_string('m_show_message.html', ctx)}
|
||||
|
||||
|
||||
# def show_choice_message_w_input(caption, text, but_ok_name, but_cancel_name, form):
|
||||
# from django.template.loader import render_to_string
|
||||
#
|
||||
# return {'html': render_to_string(
|
||||
# 'Messages/m_show_message.html',
|
||||
# get_choice_message(caption, text, but_ok_name, but_cancel_name)
|
||||
# )
|
||||
# }
|
||||
|
||||
|
||||
def show_choice_message_green(caption, text, but_ok_name, but_cancel_name, form=None):
    """Render a green OK/cancel choice dialog as an AJAX payload."""
    from django.template.loader import render_to_string

    ctx = get_choice_message(caption, text, but_ok_name, but_cancel_name, form, u'green')
    return {'html': render_to_string('m_show_message.html', ctx)}
|
||||
|
||||
|
||||
def show_choice_message_red(caption, text, but_ok_name, but_cancel_name, form=None):
    """Render a red OK/cancel choice dialog as an AJAX payload."""
    from django.template.loader import render_to_string

    ctx = get_choice_message(caption, text, but_ok_name, but_cancel_name, form, u'red')
    return {'html': render_to_string('m_show_message.html', ctx)}
|
||||
|
||||
|
||||
def get_choice_message(caption, text, but_ok_name, but_cancel_name, form=None, color=u'red', show_icon=None):
    """Template context for an OK/cancel dialog, styled red or green."""
    ctx = {
        'message': text,
        'caption': caption,
        'form': form,
    }

    # pick the colour scheme; anything other than red/green gets no styling
    if color == u'red':
        ctx.update(get_error_message_Dict(show_icon))
    elif color == u'green':
        ctx.update(get_good_message_Dict(show_icon))

    ctx.update(get_return_to_choice_buts(but_ok_name, but_cancel_name))
    return ctx
|
||||
|
||||
|
||||
def get_but_ok_go_to_blank_page(button_caption, url):
    """Context fragment: a close-button that opens a users-report page in a new tab.

    NOTE(review): the '/restaurant_control/...' prefix is hard-coded — confirm
    this helper is only used for that report.
    """
    link = (u'<p class="button_box"><a target="_blank" class="button close" '
            u'href="/restaurant_control/users_control/show_users_report/{0}">{1}</a></p>').format(url, button_caption)
    return {'buttons': link}
|
||||
|
||||
|
||||
def get_good_message_ok_go_to_blank_page(caption, text, button_caption, url, show_icon=None):
    """Full template context: success dialog whose button opens *url* in a new tab."""
    ctx = {'message': text, 'caption': caption}
    ctx.update(get_good_message_Dict(show_icon))
    ctx.update(get_but_ok_go_to_blank_page(button_caption, url))
    return ctx
|
||||
|
||||
|
||||
def get_good_message(caption, text, show_icon=None):
    """Full template context for a success dialog with a 'done' button."""
    ctx = {'message': text, 'caption': caption}
    ctx.update(get_good_message_Dict(show_icon))
    ctx.update(get_return_to_ready_but())
    return ctx
|
||||
21
BaseModels/middlewares/web_requests_middleware.py
Normal file
21
BaseModels/middlewares/web_requests_middleware.py
Normal file
@@ -0,0 +1,21 @@
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
class WebRequestMiddleware:
    """Stamps the authenticated user's profile with the time of each web request.

    Alert-polling endpoints (/v1/alert/) are excluded so background polling
    does not count as user activity.

    NOTE(review): datetime.now() is naive — confirm the project stores naive
    local times elsewhere too.
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)

        if '/v1/alert/' not in request.path:
            user = getattr(request, 'user', None)
            if user and not user.is_anonymous and user.user_profile:
                user.user_profile.last_web_request = datetime.now()
                user.user_profile.save(update_fields=['last_web_request'])

        return response
|
||||
211
BaseModels/office_documents_utils.py
Normal file
211
BaseModels/office_documents_utils.py
Normal file
@@ -0,0 +1,211 @@
|
||||
import copy
|
||||
|
||||
from openpyxl import Workbook
|
||||
from django.http import HttpResponse
|
||||
from openpyxl.writer.excel import save_virtual_workbook
|
||||
from openpyxl.utils import get_column_letter
|
||||
from BaseModels.mailSender import techSendMail
|
||||
from openpyxl.styles import PatternFill, Font, Alignment
|
||||
from openpyxl.styles.borders import Border, Side
|
||||
from openpyxl.styles.numbers import BUILTIN_FORMATS
|
||||
from colorsys import rgb_to_hls
|
||||
|
||||
|
||||
def pairwise(iterable):
    """Yield non-overlapping consecutive pairs: (x0, x1), (x2, x3), ...

    A trailing odd element is dropped (zip stops at the shorter side).
    """
    it = iter(iterable)
    return zip(it, it)
|
||||
|
||||
|
||||
options_params_splitter = '<|>'
|
||||
|
||||
|
||||
def set_col_options(ws, row, col, rows_len, options):
    """Apply per-cell / per-column formatting options to worksheet *ws*.

    *options* is either a dict or a query-string-like 'k=v&k2=v2' string.
    Recognized keys: cols_merge, g_col_back_color, g_col_num_w_sep,
    col_show_total, back_color, bold, col_bold.
    Returns the number of columns the cell occupies after merging (>= 1),
    which the caller uses as the column increment.
    """
    # 'a=1&b=2' -> {'a': '1', 'b': '2'}; the [::1] slice is a no-op copy
    if type(options) == str:
        options = dict(item.split("=")[::1] for item in options.split('&'))

    cols_set = 1
    if options:

        # any 'g_col_*' key means whole-column (group) formatting is requested
        exists_group_option_for_column = False
        for key in options.keys():
            if key.startswith('g_col_'):
                exists_group_option_for_column = True
                break

        # merge this cell horizontally over 'cols_merge' columns
        if 'cols_merge' in options and options['cols_merge']:
            cols_set = int(options['cols_merge'])
            if cols_set > 1:
                ws.merge_cells(start_row=row, start_column=col, end_row=row, end_column=col + cols_set - 1)

        if exists_group_option_for_column:
            g_col_back_color = None
            if 'g_col_back_color' in options and options['g_col_back_color']:
                g_col_back_color = options['g_col_back_color']
            g_col_num_w_sep = None
            if 'g_col_num_w_sep' in options and options['g_col_num_w_sep']:
                g_col_num_w_sep = options['g_col_num_w_sep']

            # walk every cell of the merged column block down to rows_len - 1
            cur_col = col
            while cur_col < col + cols_set:
                cur_row = row
                while cur_row < rows_len:
                    if g_col_back_color:
                        ws.cell(row=cur_row, column=cur_col).fill = PatternFill('solid', fgColor=g_col_back_color)
                    if g_col_num_w_sep:
                        # thousands-separator number format
                        ws.cell(row=cur_row, column=cur_col).number_format = '#,##0.00'
                    cur_row += 1
                cur_col += 1

        # put a bold =SUM(...) over the column into the last row (rows_len)
        if 'col_show_total' in options and options['col_show_total']:
            ws.cell(row=rows_len, column=col).font = Font(bold=True)
            ws.cell(row=rows_len, column=col).value = "=SUM({0}{1}:{0}{2})".format(
                get_column_letter(col),
                row + 1,
                rows_len - 1
            )
            ws.cell(row=rows_len, column=col).number_format = '#,##0.00'

        # single-cell background colour
        if 'back_color' in options and options['back_color']:
            ws.cell(row=row, column=col).fill = PatternFill('solid', fgColor=options['back_color'])

        # single-cell bold
        if 'bold' in options and options['bold']:
            ws.cell(row=row, column=col).font = Font(bold=True)

        # bold for the whole (merged) column block
        if 'col_bold' in options and options['col_bold']:
            cur_col = col
            while cur_col < col + cols_set:
                cur_row = row
                while cur_row < rows_len:
                    ws.cell(row=cur_row, column=cur_col).font = Font(bold=True)
                    cur_row += 1
                cur_col += 1

    return cols_set
|
||||
|
||||
|
||||
def add_table_in_workbook(work_sheet, data, convert_minus_to_null=False, headers_rows_count=0):
    """Write a 2-D table (*data*: rows of cell values) into *work_sheet*.

    Cell values may carry formatting options: a string
    'value<|>k=v&k2=v2' or a dict {'val': ..., <option keys>} — options are
    applied via set_col_options. Every cell gets a thin border; column
    widths are auto-fitted afterwards (clamped to 3..150), skipping the
    first *headers_rows_count* rows when measuring.
    """
    thin_border = Border(left=Side(style='thin'),
                         right=Side(style='thin'),
                         top=Side(style='thin'),
                         bottom=Side(style='thin'))

    r = 1

    for row in data:

        try:

            c = 1
            cols = row
            # NOTE(review): if *data* is a dict, iterating it yields keys, so
            # row.values() here would fail and be swallowed by the except
            # below — confirm dict input is actually supported by callers.
            if type(data) == dict:
                cols = row.values()
            for val in cols:
                options = None

                inc_c = 1

                work_sheet.cell(row=r, column=c).border = thin_border

                # extract formatting options from the value
                if type(val) == str:
                    val_w_options = val.split(options_params_splitter)
                    if len(val_w_options) > 1:
                        val = val_w_options[0]
                        # apply the options
                        inc_c = set_col_options(work_sheet, row=r, col=c, rows_len=len(data) + 1,
                                                options=val_w_options[1])
                elif type(val) == dict:
                    inc_c = set_col_options(work_sheet, row=r, col=c, rows_len=len(data) + 1, options=val)
                    val = val['val']

                # option: clamp negative numeric values to zero
                if convert_minus_to_null:
                    try:
                        if val < 0:
                            val = 0
                    except:
                        pass

                # openpyxl rejects some value types; fall back to str()
                try:
                    work_sheet.cell(row=r, column=c).value = val
                except:
                    work_sheet.cell(row=r, column=c).value = str(val)

                c += inc_c

        except Exception as e:
            msg = f'add_table_in_workbook in row {str(r)} ERROR = {str(e)}'
            print(msg)

        r += 1

    try:

        # auto-fit column widths to the longest value in each column
        dims = {}
        row_c = 0
        for row in work_sheet.rows:
            # don't size columns to fit the header captions
            if row_c < headers_rows_count:
                row_c += 1
                continue

            for cell in row:
                if cell.value:
                    dims[cell.column] = max((dims.get(cell.column, 0), len(str(cell.value))))

            row_c += 1

        for col, value in dims.items():
            # clamp widths to a sane range
            if value > 150:
                value = 150
            if value < 3:
                value = 3
            work_sheet.column_dimensions[get_column_letter(col)].width = value

    except Exception as e:
        msg = f'add_table_in_workbook in sets width ERROR = {str(e)}'
        print(msg)

    return
|
||||
|
||||
|
||||
def get_xls_file_by_data_list(data, convert_minus_to_null=False):
    """Build an in-memory XLSX workbook from *data* and return its bytes.

    *data* is either a plain table (list of rows) or a list of page dicts
    ({'table': ..., optional 'title', 'headers_rows_count'}), one worksheet
    per page. On failure returns the error string instead of bytes.
    """
    try:
        wb = Workbook()
        ws = wb.active

        multi_page = type(data) == list and len(data) and type(data[0]) == dict
        if multi_page:
            for page_no, page in enumerate(data):
                title = page.get('title') if 'title' in page else None

                # the first sheet already exists — just rename it;
                # subsequent pages get a fresh sheet
                if page_no == 0:
                    if title:
                        ws.title = title
                else:
                    ws = wb.create_sheet(title)

                headers_rows_count = page['headers_rows_count'] if 'headers_rows_count' in page else 0

                add_table_in_workbook(ws, page['table'], convert_minus_to_null, headers_rows_count)
        else:
            add_table_in_workbook(ws, data, convert_minus_to_null)

        # NOTE(review): save_virtual_workbook is deprecated in newer openpyxl
        return save_virtual_workbook(wb)

    except Exception as e:
        msg = str(e)
        print(msg)
        return msg
|
||||
31
BaseModels/openAI/openAI_funcs.py
Normal file
31
BaseModels/openAI/openAI_funcs.py
Normal file
@@ -0,0 +1,31 @@
|
||||
import requests
|
||||
def send_request(msg):
    """Send *msg* as a single user message to OpenAI chat completion
    (gpt-3.5-turbo) and return the raw API response object.
    """
    # earlier raw-requests implementation, kept for reference:
    # url = 'https://api.openai.com/v1/chat/completions'
    # headers = {
    #     'Content-Type': 'application/json',
    #     'Authorization': 'Bearer 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'
    # }
    # data = {
    #     "model": "gpt-3.5-turbo",
    #     "messages": [{
    #         "role": "user",
    #         "content": msg
    #     }]
    # }
    # res = requests.post(url=url, headers=headers, data=data)

    import os
    import openai
    # SECURITY NOTE(review): the API key is hard-coded and committed to the
    # repository — it should be rotated and loaded from settings/environment.
    openai.api_key = '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'

    res = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {
                "role": "user",
                "content": msg
            }
        ]
    )

    return res
|
||||
111
BaseModels/paging.py
Normal file
111
BaseModels/paging.py
Normal file
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
__author__ = 'SDE'
|
||||
|
||||
def get_paging_Dict(request, elements_count, elements_on_page, from_page, to_page=None):
    """Build the template context for a pagination bar.

    Returns {'paging': [(caption, page_no), ...], 'from_page', 'to_page',
    'max_page'}. Up to 20 pages are shown as one continuous row; beyond
    that the bar is split as  1 2 3 ... 48 49 50 51 52 ... 98 99 100,
    where each u'...' entry links to the middle of the hidden range.
    *request* is accepted for interface compatibility but unused.
    """
    # BUGFIX: use integer division — on Python 3 '/' yields a float and the
    # range() calls below raised TypeError.
    pages_count = elements_count // elements_on_page
    if elements_count % elements_on_page > 0:
        pages_count = pages_count + 1

    pages = []

    cur_page = to_page if to_page else from_page

    # number of pages shown as one continuous row (no split paging)
    pages_wo_separated_paging = 20

    if pages_count < pages_wo_separated_paging + 1:
        for page in range(1, pages_count + 1):
            pages.append((page, page))
    else:
        # pages always shown at the very beginning and very end
        pages_count_for_begin_end_paging = 3

        for p in range(1, pages_count_for_begin_end_paging + 1):
            pages.append((p, p))

        # pages shown around the current page in the middle of a split bar
        center_pages_count = 5

        # if the current page is closer to an edge than this, the row stays
        # continuous on that side:  1 2 3 4 5 6 7 ... 1241 1242 1243
        pages_count_wo_separate_for_side = pages_count_for_begin_end_paging + center_pages_count + 1

        if cur_page < pages_count_wo_separate_for_side:
            # current page near the left edge — continuous left run
            for p in range(pages_count_for_begin_end_paging + 1, pages_count_wo_separate_for_side + 1):
                pages.append((p, p))
            # '...' jumps to the middle of the hidden right-hand range
            mid_rigth_page = (pages_count + 1 - pages_count_for_begin_end_paging
                              - pages_count_wo_separate_for_side) // 2
            mid_rigth_page = pages_count_wo_separate_for_side + mid_rigth_page
            pages.append((u'...', mid_rigth_page))

        elif cur_page > pages_count + 1 - pages_count_wo_separate_for_side:
            # current page near the right edge — continuous right run
            mid_left_page = (pages_count + 1 - pages_count_for_begin_end_paging
                             - pages_count_wo_separate_for_side) // 2
            mid_left_page = pages_count_for_begin_end_paging + mid_left_page
            pages.append((u'...', mid_left_page))
            for p in range(pages_count + 1 - pages_count_wo_separate_for_side,
                           pages_count - pages_count_for_begin_end_paging + 1):
                pages.append((p, p))

        else:
            # current page in the middle — center block flanked by two '...'
            mid_page = cur_page

            mid_left_page = (mid_page - 1 - pages_count_for_begin_end_paging) // 2 + pages_count_for_begin_end_paging
            mid_rigth_page = (pages_count - (mid_page + pages_count_for_begin_end_paging)) // 2 + mid_page + 1

            # pages added on each side of the current (central) page
            half_center = center_pages_count // 2

            pages.append((u'...', mid_left_page))
            for p in range(mid_page - half_center, mid_page + half_center + 1):
                pages.append((p, p))
            pages.append((u'...', mid_rigth_page))

        # trailing pages (always shown)
        for p in range(pages_count + 1 - pages_count_for_begin_end_paging, pages_count + 1):
            pages.append((p, p))

    if not to_page:
        to_page = from_page

    return {
        'paging': pages,
        'from_page': from_page,
        'to_page': to_page,
        'max_page': pages_count,
    }
|
||||
32
BaseModels/pil_graphic_utils.py
Normal file
32
BaseModels/pil_graphic_utils.py
Normal file
@@ -0,0 +1,32 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
__author__ = 'SDE'
|
||||
|
||||
|
||||
import os
|
||||
from PIL import Image
|
||||
|
||||
def get_thumbnail_url(image_url, size=150):
    """Insert a 'thumbs_<size>' path segment before the file name of *image_url*."""
    head, tail = image_url.rsplit('/', 1)
    return '/'.join((head, 'thumbs_' + str(size), tail))
|
||||
|
||||
def get_thumbnail_path(image_path, size=150):
    """Return the thumbnail path for *image_path*, creating the
    sibling 'thumbs_<size>' directory if it does not exist yet.
    """
    directory, filename = os.path.split(image_path)
    thumb_dir = os.path.join(directory, 'thumbs_' + str(size))
    if not os.path.exists(thumb_dir):
        os.mkdir(thumb_dir, mode=0o755)
    return os.path.join(thumb_dir, filename)
|
||||
|
||||
def create_thumbnail(image_path, size=150):
    """(Re)generate a size*size thumbnail next to *image_path*.

    NOTE(review): Image.ANTIALIAS was removed in Pillow 10 — newer Pillow
    needs Image.LANCZOS here.
    """
    thumb_path = get_thumbnail_path(image_path, size)
    # drop any stale thumbnail before writing the new one
    delete_thumbnail(image_path, size)
    img = Image.open(image_path)
    img.thumbnail((size, size), Image.ANTIALIAS)
    img.save(thumb_path)
|
||||
|
||||
def delete_thumbnail(image_path, size=150):
    """Remove the thumbnail for *image_path*, if one exists."""
    thumb_path = get_thumbnail_path(image_path, size)
    if os.path.exists(thumb_path):
        os.remove(thumb_path)
|
||||
123
BaseModels/search_funcs.py
Normal file
123
BaseModels/search_funcs.py
Normal file
@@ -0,0 +1,123 @@
|
||||
try:
|
||||
import settings_local
|
||||
|
||||
pg_fts_config = 'pg_catalog.russian' # 'public.mipp_fulltext'
|
||||
except:
|
||||
pg_fts_config = 'pg_catalog.russian'
|
||||
|
||||
from django.db import models
|
||||
from django.contrib.postgres.search import Value, Func
|
||||
import copy
|
||||
|
||||
|
||||
# получаем из списка только слова содержащие цифры
|
||||
def get_list_words_contains_nums(txt):
    """Return only those words of *txt* that contain at least one digit.

    *txt* may be a space-separated string or an already-split word sequence.
    Digits are whatever characters the project's `inter.numbers` contains.
    """
    from .inter import numbers

    words = txt.split(' ') if type(txt) == str else txt

    # any() replaces the original manual index loop; the unused
    # words_w_nums accumulator has been removed
    return [word for word in words if any(ch in numbers for ch in word)]
|
||||
|
||||
|
||||
# получаем список слов с разделенными цифрами и текстом
|
||||
def get_list_split_words_w_nums(txt):
    """Split each word of *txt* at every digit/non-digit boundary.

    E.g. 'ab12cd' -> ['ab', '12', 'cd']. *txt* may be a space-separated
    string or an already-split word sequence; returns the flat list of
    pieces. Digits are whatever characters `inter.numbers` contains.
    """
    from .inter import numbers

    if type(txt) == str:
        words = txt.split(' ')
    else:
        words = txt

    # words_w_nums = []

    # collect the word pieces split at digit <-> letter boundaries
    words_w_devided_nums = []
    for word in copy.copy(words):

        i = 0
        is_number = False          # whether the current run is digits
        cut_piece_compete = False  # a boundary was found at position i

        while i < len(word):
            if i == 0:
                # first character decides the type of the current run
                if word[i] in numbers:
                    is_number = True
                else:
                    is_number = False
            else:
                # a type change (digit<->non-digit) marks a cut point
                if word[i] in numbers:
                    if not is_number:
                        cut_piece_compete = True
                else:
                    if is_number:
                        cut_piece_compete = True

            if cut_piece_compete:
                cut_piece_compete = False
                words_w_devided_nums.append(word[0:i])

                # if is_number:
                #     words_w_nums.append(word[0:i])

                # restart the scan on the remainder of the word
                word = word[i:]
                i = 0
            else:
                i += 1

        # flush the trailing piece, if any
        if i > 0:
            words_w_devided_nums.append(word[0:i])
            # if is_number:
            #     words_w_nums.append(word[0:i])

    return words_w_devided_nums
|
||||
|
||||
|
||||
class Headline(Func):
    """ORM wrapper for PostgreSQL ts_headline(): highlights query matches in text.

    Argument order mirrors the SQL function: optional config first, then the
    document field, the tsquery, and optional formatting options.
    """
    function = 'ts_headline'

    def __init__(self, field, query, config=None, options=None, **extra):
        expressions = [field, query]
        if config:
            # config (e.g. 'pg_catalog.russian') must precede field and query
            expressions.insert(0, Value(config))
        if options:
            # ts_headline option string, e.g. 'StartSel=<b>, StopSel=</b>'
            expressions.append(Value(options))
        extra.setdefault('output_field', models.TextField())
        super(Headline, self).__init__(*expressions, **extra)
|
||||
|
||||
|
||||
def get_search_lexems_list(search_phrase):
    """Normalize *search_phrase* into a list of PostgreSQL FTS lexems.

    Runs plainto_tsquery() with the module-level pg_fts_config and strips
    the tsquery punctuation, returning e.g. ['bolt', 'm8'].
    Returns None when the phrase yields no lexems.
    """
    from django.db import connection
    search_lexems_list = None

    cursor = connection.cursor()
    cursor.execute("SET NAMES 'UTF8';")
    # SECURITY FIX: the phrase was previously interpolated via str.format,
    # allowing SQL injection — now passed as a bound parameter.
    cursor.execute("SELECT plainto_tsquery(%s::regconfig, %s);",
                   [pg_fts_config, search_phrase])
    search_lexems = cursor.fetchone()
    s = search_lexems[0]

    if search_lexems:
        # "'bolt' & 'm8'" -> ['bolt', 'm8']
        search_lexems = s.replace('\\', '')
        search_lexems = search_lexems.replace("'", '')
        search_lexems = search_lexems.replace(" ", '')
        search_lexems_list = search_lexems.split('&')

    return search_lexems_list
|
||||
2
BaseModels/search_optimization/OEMBED/OEMBED_data.py
Normal file
2
BaseModels/search_optimization/OEMBED/OEMBED_data.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# https://oembed.com/
|
||||
# https://habr.com/ru/post/141303/
|
||||
1
BaseModels/search_optimization/RSS/ya_RSS_chanel.py
Normal file
1
BaseModels/search_optimization/RSS/ya_RSS_chanel.py
Normal file
@@ -0,0 +1 @@
|
||||
# https://yandex.ru/dev/turbo/doc/quick-start/articles.html
|
||||
0
BaseModels/search_optimization/__init__.py
Normal file
0
BaseModels/search_optimization/__init__.py
Normal file
2
BaseModels/search_optimization/google_AMP/AMP.py
Normal file
2
BaseModels/search_optimization/google_AMP/AMP.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# https://amp.dev/ru/
|
||||
# https://www.seonews.ru/analytics/optimization-2020-vnedrenie-amp-dlya-internet-magazina-bez-poteri-konversii-v-google/
|
||||
@@ -0,0 +1 @@
|
||||
# https://developers.google.com/search/docs/advanced/appearance/enable-web-stories?hl=ru#google-discover
|
||||
1
BaseModels/search_optimization/google_tips
Normal file
1
BaseModels/search_optimization/google_tips
Normal file
@@ -0,0 +1 @@
|
||||
https://developers.google.com/search/docs/beginner/seo-starter-guide?hl=ru#understand_your_content
|
||||
0
BaseModels/search_optimization/ld_json/__init__.py
Normal file
0
BaseModels/search_optimization/ld_json/__init__.py
Normal file
38
BaseModels/search_optimization/ld_json/ld_ speakebale.py
Normal file
38
BaseModels/search_optimization/ld_json/ld_ speakebale.py
Normal file
@@ -0,0 +1,38 @@
|
||||
|
||||
|
||||
def get_ld_speakebale(name, theme_xpath, info_xpath, url):
    """Build schema.org WebPage markup carrying a SpeakableSpecification.

    Args:
        name: page name.
        theme_xpath: XPath of the title/theme node to read aloud.
        info_xpath: XPath of the description/body node to read aloud.
        url: canonical page URL.

    Returns:
        dict: JSON-LD-ready structure (not serialized).
    """
    speakable_spec = {
        "@type": "SpeakableSpecification",
        "xPath": [
            theme_xpath,
            info_xpath,
        ],
    }

    return {
        "@context": "https://schema.org/",
        "@type": "WebPage",
        "name": name,
        "speakable": speakable_spec,
        "url": url,
    }
|
||||
|
||||
# <title>Speakable markup example</title>
|
||||
# <meta name="description" content="This page is all about the quick brown fox" />
|
||||
# <script type="application/ld+json">
|
||||
# {
|
||||
# "@context": "https://schema.org/",
|
||||
# "@type": "WebPage",
|
||||
# "name": "Quick Brown Fox",
|
||||
# "speakable":
|
||||
# {
|
||||
# "@type": "SpeakableSpecification",
|
||||
# "xPath": [
|
||||
# "/html/head/title",
|
||||
# "/html/head/meta[@name='description']/@content"
|
||||
# ]
|
||||
# },
|
||||
# "url": "http://www.quickbrownfox_example.com/quick-brown-fox"
|
||||
# }
|
||||
# </script>
|
||||
22
BaseModels/search_optimization/ld_json/ld_FAQ.py
Normal file
22
BaseModels/search_optimization/ld_json/ld_FAQ.py
Normal file
@@ -0,0 +1,22 @@
|
||||
|
||||
|
||||
def get_ld_FAQ(data_Dict):
    """Build schema.org FAQPage markup from a question -> answer mapping.

    Args:
        data_Dict: dict whose keys are question strings and values are the
            corresponding answer strings.

    Returns:
        dict: JSON-LD-ready FAQPage structure (not serialized).
    """
    main_entity = [
        {
            "@type": "Question",
            "name": question,
            "acceptedAnswer": {
                "@type": "Answer",
                "text": answer,
            },
        }
        for question, answer in data_Dict.items()
    ]

    return {
        "@context": "https://schema.org",
        "@type": "FAQPage",
        "mainEntity": main_entity,
    }
|
||||
36
BaseModels/search_optimization/ld_json/ld_QA.py
Normal file
36
BaseModels/search_optimization/ld_json/ld_QA.py
Normal file
@@ -0,0 +1,36 @@
|
||||
|
||||
|
||||
def get_ld_QA(data_Dict):
    """Return schema.org QAPage markup.

    NOTE(review): ``data_Dict`` is currently ignored — the function returns
    the hard-coded ounces/pound sample payload from the schema.org docs.
    Looks like a placeholder awaiting a real implementation; confirm before
    relying on it in production markup.
    """

    # Static example QAPage structure (question + accepted and suggested answers).
    data = {
        "@context": "https://schema.org",
        "@type": "QAPage",
        "mainEntity": {
            "@type": "Question",
            "name": "How many ounces are there in a pound?",
            "text": "I have taken up a new interest in baking and keep running across directions in ounces and pounds. I have to translate between them and was wondering how many ounces are in a pound?",
            "answerCount": 3,
            "upvoteCount": 26,
            "acceptedAnswer": {
                "@type": "Answer",
                "text": "1 pound (lb) is equal to 16 ounces (oz).",
                "upvoteCount": 1337,
                "url": "https://example.com/question1#acceptedAnswer"
            },
            "suggestedAnswer": [
                {
                    "@type": "Answer",
                    "text": "Are you looking for ounces or fluid ounces? If you are looking for fluid ounces there are 15.34 fluid ounces in a pound of water.",
                    "upvoteCount": 42,
                    "url": "https://example.com/question1#suggestedAnswer1"
                }, {
                    "@type": "Answer",
                    "text": " I can't remember exactly, but I think 18 ounces in a lb. You might want to double check that.",
                    "upvoteCount": 0,
                    "url": "https://example.com/question1#suggestedAnswer2"
                }
            ]
        }
    }

    return data
|
||||
41
BaseModels/search_optimization/ld_json/ld_article_news.py
Normal file
41
BaseModels/search_optimization/ld_json/ld_article_news.py
Normal file
@@ -0,0 +1,41 @@
|
||||
|
||||
import json
|
||||
|
||||
import project_sets
|
||||
from project_sets import *
|
||||
from django.urls import reverse
|
||||
from django.utils.html import strip_tags
|
||||
|
||||
def get_ld_article_news(art_name, art_txt, art_DT, url_data):
    """Build schema.org NewsArticle JSON for an article page.

    Args:
        art_name: headline text.
        art_txt: article HTML body; images found in it become the "image"
            list, tags are stripped for "articleBody".
        art_DT: publication datetime (isoformat() is used).
        url_data: kwargs for django.urls.reverse() to build the article URL.

    Returns:
        str: JSON-encoded NewsArticle payload.
    """
    from BaseModels.inter import get_all_photos_from_html_content

    # Absolutize every image path found in the article HTML.
    img_list = get_all_photos_from_html_content(art_txt)
    if img_list:
        img_list = ["{0}{1}".format(project_sets.domain, img) for img in img_list]

    data = {
        "@context": "https://schema.org",
        "@type": "NewsArticle",
        "url": "{0}{1}".format(project_sets.domain, reverse(**url_data)),
        "publisher": {
            "@type": "Organization",
            "name": project_sets.company_name,
            "logo": project_sets.logo
        },
        "author": {
            "@type": "Organization",
            "name": project_sets.company_name,
            "logo": project_sets.logo,
            "url": project_sets.domain,
        },
        "headline": art_name,
        # "mainEntityOfPage": original-source URL (intentionally omitted)
        "articleBody": strip_tags(art_txt),
        "datePublished": art_DT.isoformat()
    }
    if img_list:
        data["image"] = img_list

    return json.dumps(data)
|
||||
39
BaseModels/search_optimization/ld_json/ld_breadcrambs.py
Normal file
39
BaseModels/search_optimization/ld_json/ld_breadcrambs.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import json
|
||||
|
||||
def get_ld_breadcrambs(items_list):
    """Build a schema.org BreadcrumbList JSON string.

    Each entry of ``items_list`` may be a plain string (name only), a dict
    with 'name' and 'url' keys, or an object exposing .name/.url.  Every
    element except the last carries its URL under "item".

    Returns:
        str: JSON-encoded BreadcrumbList payload.
    """
    elements = []
    total = len(items_list)

    for position, entry in enumerate(items_list, start=1):
        link = None
        if type(entry) == str:
            title = entry
        elif type(entry) == dict:
            title = entry['name']
            link = entry['url']
        else:
            title = entry.name
            link = entry.url

        element = {
            "@type": "ListItem",
            "position": position,
            "name": title,
        }
        # The final crumb is the current page and gets no link.
        if position < total:
            element["item"] = link

        elements.append(element)

    return json.dumps({
        "@context": "https://schema.org",
        "@type": "BreadcrumbList",
        "itemListElement": elements,
    })
|
||||
243
BaseModels/search_optimization/ld_json/ld_company.py
Normal file
243
BaseModels/search_optimization/ld_json/ld_company.py
Normal file
@@ -0,0 +1,243 @@
|
||||
import json
|
||||
|
||||
import project_sets
|
||||
from collections import OrderedDict
|
||||
|
||||
def get_ld_logo():
    """Return schema.org Organization markup announcing the site logo.

    Returns:
        dict: JSON-LD-ready structure built from project_sets settings.
    """
    return {
        "@context": "https://schema.org",
        "@type": "Organization",
        "url": project_sets.domain,
        "logo": project_sets.logo,
    }
|
||||
|
||||
|
||||
def get_ld_company(offices):
    """Build schema.org LocalBusiness JSON for the company.

    Args:
        offices: queryset/sequence of office objects; the one flagged
            ``main_office=True`` (falling back to the first entry) supplies
            the address/contact details via get_ld_office().

    Returns:
        str: JSON-encoded LocalBusiness payload.
    """
    try:
        main_office = offices.get(main_office=True)
    # Narrowed from a bare except (which also swallowed KeyboardInterrupt):
    # covers DoesNotExist / MultipleObjectsReturned and plain lists that
    # have no .get(main_office=...) method.
    except Exception:
        main_office = offices[0]

    data = {
        "@context": "https://schema.org",
        "@type": "LocalBusiness",
        "logo": project_sets.logo,
    }

    data.update(get_ld_office(main_office))

    # NOTE(review): secondary offices used to be collected into a
    # "department" list here, but the data.update({'department': ...}) call
    # was commented out, so the loop only produced discarded work.  Re-enable
    # by building [get_ld_office(o) for o in offices if o != main_office]
    # and adding it under the "department" key.

    return json.dumps(data)
|
||||
|
||||
|
||||
def get_ld_office(office):
    """Build the schema.org LocalBusiness payload for a single office.

    Includes name, image, postal address (with the first phone number when
    available), optional GPS coordinates, URL, social-profile links, price
    range and weekday opening hours.  Per-office attributes fall back to
    project_sets defaults where sensible.

    Args:
        office: office model instance (name/address/city required, the rest
            read via getattr with fallbacks).

    Returns:
        dict: JSON-LD-ready structure (not serialized).
    """
    # Phones: prefer the callable accessor, fall back to the related
    # contacts manager; either may be missing depending on the model.
    try:
        phones = office.phones()
    except Exception:  # narrowed from bare except
        phones = []

    if not phones:
        try:
            phones = office.rel_contacts_for_office
        except Exception:  # narrowed from bare except
            phones = []

    data = {
        "name": office.name,
    }

    # Google requires at least one crawlable, indexable image per marked-up
    # page; it picks the best candidate by aspect ratio / resolution
    # (recommended: >= 50k pixels in 16x9, 4x3 and 1x1 variants).
    data.update({
        "image": [
            project_sets.logo,
        ]
    })

    # A more specific @type (Store, Dentist, TravelAgency, ...) is optional;
    # see schema.org LocalBusiness subtypes.
    i_Dict = {
        "address": {
            "@type": "PostalAddress",
            "streetAddress": office.address,
            "addressLocality": office.city,
        },
    }
    if phones:
        i_Dict["address"].update({
            "telephone": '{0}{1}'.format(phones[0].prefix, phones[0].nomber_phone),
        })

    data.update(i_Dict)

    # Coordinates: per-office values win, project-level defaults fill gaps;
    # emitted only when both axes are known.
    gps_longitude = getattr(office, 'gps_longitude', None)
    gps_latitude = getattr(office, 'gps_latitude', None)
    if not gps_longitude:
        gps_longitude = getattr(project_sets, 'gps_longitude', None)
    if not gps_latitude:
        gps_latitude = getattr(project_sets, 'gps_latitude', None)
    if gps_longitude and gps_latitude:
        data.update({
            "geo": {
                "@type": "GeoCoordinates",
                "latitude": gps_latitude,
                "longitude": gps_longitude
            },
        })

    data.update({
        "url": project_sets.domain
    })

    # Default of None keeps the function working when the setting is absent:
    # the original getattr() call had no default and raised AttributeError.
    company_reference_links = getattr(project_sets, 'company_reference_links', None)
    if company_reference_links:
        data.update({
            "sameAs": company_reference_links
        })

    priceRange = getattr(office, 'priceRange', '$')
    if priceRange:
        data.update({
            "priceRange": priceRange
        })

    # Opening hours: per-office values, then project defaults, then 9-18.
    work_time_from = getattr(office, 'work_time_from', None)
    if not work_time_from:
        work_time_from = getattr(project_sets, 'work_time_from', '9:00')
    work_time_to = getattr(office, 'work_time_to', None)
    if not work_time_to:
        work_time_to = getattr(project_sets, 'work_time_to', '18:00')

    data.update({
        "openingHoursSpecification": [
            {
                "@type": "OpeningHoursSpecification",
                "dayOfWeek": [
                    "https://schema.org/Monday",
                    "https://schema.org/Tuesday",
                    "https://schema.org/Wednesday",
                    "https://schema.org/Thursday",
                    "https://schema.org/Friday",
                ],
                "opens": work_time_from,
                "closes": work_time_to
            },
        ],
    })

    return data
|
||||
|
||||
|
||||
@@ -0,0 +1,53 @@
|
||||
import json
|
||||
import re
|
||||
|
||||
import project_sets
|
||||
from django.urls import reverse
|
||||
from django.utils.html import strip_tags
|
||||
|
||||
|
||||
def create_videoobject(video_path, name, description, DT):
    """Build schema.org VideoObject markup for an embedded YouTube video.

    Args:
        video_path: embed URL whose last path segment is the YouTube id.
        name: video title.
        description: video description.
        DT: upload datetime (isoformat() is used).

    Returns:
        dict: JSON-LD-ready VideoObject structure (not serialized).
    """
    video_id = video_path.rsplit('/', 1)[-1]
    # YouTube serves auto-generated preview frames 1..4 for every video id.
    thumbs = [
        "https://img.youtube.com/vi/{0}/{1}.jpg".format(video_id, frame)
        for frame in range(1, 5)
    ]

    return {
        "@context": "https://schema.org",
        "@type": "VideoObject",
        "name": name,
        "description": description,
        "thumbnailUrl": thumbs,
        "uploadDate": DT.isoformat(),
        # "duration" / "contentUrl" / "interactionStatistic" /
        # "regionsAllowed" are optional and intentionally omitted.
        "embedUrl": video_path,
    }
|
||||
|
||||
|
||||
def get_ld_videoobjects_for_page_html(obj, name, description, DT, content):
    """Collect VideoObject JSON-LD strings for a page.

    Emits one entry for ``obj.video`` (when set) plus one for every video
    embedded in the HTML ``content`` that does not duplicate ``obj.video``.

    Args:
        obj: page-like object exposing a .video embed URL (may be falsy).
        name, description, DT: passed through to create_videoobject().
        content: page HTML to scan for embedded videos (may be falsy).

    Returns:
        list[str]: JSON-encoded VideoObject payloads.
    """
    from BaseModels.inter import get_all_videos_from_html_content

    res_list = []

    if obj.video:
        data = create_videoobject(obj.video, name, description, DT)
        res_list.append(json.dumps(data))

    if not content:
        return res_list

    videos_list = get_all_videos_from_html_content(content)

    for video_path in videos_list:
        # Bug fix: when obj.video was '' the old test `not obj.video in
        # video_path` was always False ('' is a substring of everything), so
        # embedded videos were silently skipped; when obj.video was None the
        # membership test raised TypeError.  Only dedupe against a real URL.
        is_duplicate = bool(obj.video) and (
            video_path in obj.video or obj.video in video_path
        )
        if not is_duplicate:
            data = create_videoobject(video_path, name, description, DT)
            res_list.append(json.dumps(data))

    return res_list
|
||||
178
BaseModels/search_optimization/ld_json/ld_product.py
Normal file
178
BaseModels/search_optimization/ld_json/ld_product.py
Normal file
@@ -0,0 +1,178 @@
|
||||
# import json
|
||||
#
|
||||
# import project_sets
|
||||
# from BaseModels.functions import add_domain
|
||||
#
|
||||
#
|
||||
# def get_ld_shipping_data_for_product(shipping_terms):
|
||||
# shipping_terms_list = []
|
||||
# for item in shipping_terms:
|
||||
# data = {
|
||||
# "@type": "OfferShippingDetails",
|
||||
# "shippingRate": {
|
||||
# "@type": "MonetaryAmount",
|
||||
# "value": item.price,
|
||||
# "currency": project_sets.base_currency
|
||||
# },
|
||||
# "shippingDestination": {
|
||||
# "@type": "DefinedRegion",
|
||||
# "addressCountry": project_sets.shipping_region, # обязательно
|
||||
# # "postalCodeRange": {
|
||||
# # "postalCodeBegin": "98100",
|
||||
# # "postalCodeEnd": "98199"
|
||||
# # }
|
||||
# },
|
||||
# "deliveryTime": {
|
||||
# "@type": "ShippingDeliveryTime",
|
||||
# "cutOffTime": project_sets.cutOffTime, # "19:30-08:00",
|
||||
#
|
||||
# # Стандартное время от получения оплаты до отправки товаров со склада (или подготовки к самовывозу, если используется такой вариант)
|
||||
# "handlingTime": {
|
||||
# "@type": "QuantitativeValue",
|
||||
# "minValue": "0", # дней
|
||||
# "maxValue": "1" # дней
|
||||
# },
|
||||
# # Стандартное время от отправки заказа до его прибытия к конечному покупателю.
|
||||
# "transitTime": {
|
||||
# "@type": "QuantitativeValue",
|
||||
# "minValue": "1", # дней
|
||||
# "maxValue": "5" # дней
|
||||
# },
|
||||
# # Время, после которого новые заказы не обрабатываются в тот же день
|
||||
#
|
||||
# # Дни недели, по которым вы обрабатываете заказы
|
||||
# "businessDays": {
|
||||
# "@type": "OpeningHoursSpecification",
|
||||
# "dayOfWeek": ["https://schema.org/Monday", "https://schema.org/Tuesday",
|
||||
# "https://schema.org/Wednesday", "https://schema.org/Thursday"]
|
||||
# }
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# shipping_terms_list.append(data)
|
||||
#
|
||||
# data = {
|
||||
# "shippingDetails": shipping_terms_list
|
||||
# }
|
||||
#
|
||||
# return data
|
||||
#
|
||||
#
|
||||
# def get_ld_offers_for_product(product, domain, shipping_terms):
|
||||
# data = {
|
||||
# "offers": {
|
||||
# "@type": "Offer",
|
||||
# "url": '{0}{1}'.format(domain, product.get_site_url()),
|
||||
# "itemCondition": "https://schema.org/NewCondition",
|
||||
# # "https://schema.org/NewCondition"
|
||||
# # "https://schema.org/UsedCondition"
|
||||
# "availability": "https://schema.org/InStock",
|
||||
# # https://schema.org/BackOrder
|
||||
# # https://schema.org/Discontinued
|
||||
# # https://schema.org/InStock
|
||||
# # https://schema.org/InStoreOnly
|
||||
# # https://schema.org/LimitedAvailability
|
||||
# # https://schema.org/OnlineOnly
|
||||
# # https://schema.org/OutOfStock
|
||||
# # https://schema.org/PreOrder
|
||||
# # https://schema.org/PreSale
|
||||
# # https://schema.org/SoldOut
|
||||
# "price": str(product.price),
|
||||
# "priceCurrency": project_sets.base_currency,
|
||||
# # "priceValidUntil": "2020-11-20", #дата окончания действия цены
|
||||
# # "shippingSettingsLink": '{0}{1}'.format(project_sets.domain, 'delivery/'),
|
||||
#
|
||||
# },
|
||||
# }
|
||||
#
|
||||
# if shipping_terms:
|
||||
# data["offers"].update(get_ld_shipping_data_for_product(shipping_terms))
|
||||
#
|
||||
# return data
|
||||
#
|
||||
#
|
||||
# def get_aggregate_rating(product):
|
||||
# data = {
|
||||
# # "review": {
|
||||
# # "@type": "Review",
|
||||
# # "reviewRating": {
|
||||
# # "@type": "Rating",
|
||||
# # "ratingValue": "4",
|
||||
# # "bestRating": "5"
|
||||
# # },
|
||||
# # "author": {
|
||||
# # "@type": "Person",
|
||||
# # "name": "Fred Benson"
|
||||
# # }
|
||||
# # },
|
||||
# "aggregateRating": {
|
||||
# "@type": "AggregateRating",
|
||||
# "ratingValue": product.ratingValue,
|
||||
# "reviewCount": product.reviewCount
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# return data
|
||||
#
|
||||
#
|
||||
# def get_ld_product(product, domain, shipping_terms):
|
||||
# from GeneralApp.views import get_cur_domain
|
||||
# serv_domain, local_domain = get_cur_domain()
|
||||
#
|
||||
# data = {
|
||||
# "@context": "https://schema.org/",
|
||||
# "@type": "Product",
|
||||
# "name": product.name,
|
||||
# "sku": '{0}-{1}'.format(str(product.brand), str(product.article)),
|
||||
# "url": '{0}{1}'.format(domain, product.get_site_url()),
|
||||
# }
|
||||
#
|
||||
# if product.description:
|
||||
# data.update({
|
||||
# "description": product.description,
|
||||
# })
|
||||
#
|
||||
# barcode = getattr(product, 'barcode', None)
|
||||
# if barcode:
|
||||
# data.update({
|
||||
# "gtin14": barcode,
|
||||
# })
|
||||
#
|
||||
# gallery = getattr(product, 'gallery', None)
|
||||
# if gallery:
|
||||
# try:
|
||||
# photos = gallery.get_photos()
|
||||
# photos = list(map(lambda ph: '{0}{1}'.format(serv_domain, ph), photos))
|
||||
# except Exception as e:
|
||||
# photos = None
|
||||
#
|
||||
# if photos:
|
||||
# data.update({
|
||||
# "image": photos,
|
||||
# })
|
||||
#
|
||||
# brand = getattr(product, 'brand', None)
|
||||
# if brand:
|
||||
# if type(brand) not in [str]:
|
||||
# brand = brand.name
|
||||
#
|
||||
# data.update({
|
||||
# "brand": {
|
||||
# "@type": "Brand",
|
||||
# "name": brand
|
||||
# },
|
||||
# })
|
||||
#
|
||||
# FAQ = {}
|
||||
#
|
||||
# from ...
|
||||
#
|
||||
# aggregate_rating = getattr(product, 'ratingValue', None)
|
||||
# if aggregate_rating != None:
|
||||
# data.update(get_aggregate_rating(product))
|
||||
#
|
||||
# price = getattr(product, 'price', None)
|
||||
# if price:
|
||||
# data.update(get_ld_offers_for_product(product, domain, shipping_terms))
|
||||
#
|
||||
# return json.dumps(data)
|
||||
22
BaseModels/search_optimization/ld_json/ld_search.py
Normal file
22
BaseModels/search_optimization/ld_json/ld_search.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import json
|
||||
import project_sets
|
||||
|
||||
def get_ld_search(domain):
    """Build schema.org WebSite markup with a sitelinks SearchAction.

    Intended for the homepage only.

    Args:
        domain: site root URL used both as the WebSite url and as the base
            of the search urlTemplate.

    Returns:
        str: JSON-encoded WebSite payload.
    """
    # Search engines substitute the user's query for {search_term_string}.
    search_action = {
        "@type": "SearchAction",
        "target": {
            "@type": "EntryPoint",
            "urlTemplate": domain + "/{search_term_string}/",
        },
        "query-input": "required name=search_term_string",
    }

    return json.dumps({
        "@context": "https://schema.org",
        "@type": "WebSite",
        "url": domain,
        "potentialAction": search_action,
    })
|
||||
140
BaseModels/search_optimization/ld_json/ld_vacancy.py
Normal file
140
BaseModels/search_optimization/ld_json/ld_vacancy.py
Normal file
@@ -0,0 +1,140 @@
|
||||
import datetime
|
||||
import project_sets
|
||||
|
||||
|
||||
def get_ld_vacancies(data_Dict):
    """Build a schema.org JobPosting dict for every vacancy item.

    Args:
        data_Dict: iterable of per-vacancy dicts.  Required keys: 'title',
            'description', 'validThrough' (datetime), 'id', 'directApply'.
            Optional keys: 'office', 'required_country_of_residence',
            'salary', 'employmentType', 'educationRequirements',
            'experienceRequirements', 'required_only_experience'.

    Returns:
        list[dict]: one JSON-LD-ready JobPosting structure per item
        (not serialized).
    """

    # JobPosting markup may only appear on pages containing exactly one job
    # posting — never on listing pages or pages with no vacancy at all
    # (Google structured-data policy).

    vacancies_list = []

    for item in data_Dict:
        data = {
            "@context": "https://schema.org/",
            "@type": "JobPosting",
            "title": item['title'],
            "description": item['description'],
            # datePosted is "today" at render time, not the DB creation date.
            "datePosted": datetime.datetime.now().strftime('%Y-%m-%d'),
            "validThrough": item['validThrough'].strftime('%Y-%m-%dT%H:%M'),  # posting expiry, e.g. "2017-03-18T00:00"
            "identifier": {
                "@type": "PropertyValue",
                "name": project_sets.company_name,
                "value": str(item['id'])
            },
            "hiringOrganization": {
                "@type": "Organization",
                "name": project_sets.company_name,
                "sameAs": project_sets.domain,
                "logo": project_sets.logo
            },
        }

        if 'office' in item:
            # jobLocation names the place where the employee will work; it is
            # optional when there is no fixed site (office, plant, ...).
            job_place_Dict = {
                "jobLocation": {
                    "@type": "Place",
                    "address": {
                        "@type": "PostalAddress",
                        "streetAddress": item['office'].address,
                        "addressLocality": item['office'].city,
                        "addressCountry": "BY"
                    },
                },
            }
        else:
            job_place_Dict = {
                "jobLocationType": "TELECOMMUTE"  # remote-only position
            }
        data.update(job_place_Dict)

        if 'required_country_of_residence' in item:
            # applicantLocationRequirements restricts where the candidate may
            # live; at least one country must be specified.
            required_country_of_residence = {
                "applicantLocationRequirements": {
                    "@type": "Country",
                    "name": item['required_country_of_residence']['country']
                },
            }
            data.update(required_country_of_residence)

        if 'salary' in item:
            salary_Dict = {
                "baseSalary": {
                    "@type": "MonetaryAmount",
                    "currency": item['salary']['currency'],
                    "value": {
                        "@type": "QuantitativeValue",
                        "unitText": item['salary']['time_unit']
                        # one of: HOUR / DAY / WEEK / MONTH / YEAR
                    }
                }
            }
            # Exact salary wins over a range; 'price_to' may extend either.
            if 'price' in item['salary']:
                salary_Dict['baseSalary']['value']['value'] = item['salary']['price']
            elif 'price_from' in item['salary']:
                salary_Dict['baseSalary']['value']['minValue'] = item['salary']['price_from']

            if 'price_to' in item['salary']:
                salary_Dict['baseSalary']['value']['maxValue'] = item['salary']['price_to']

            data.update(salary_Dict)

        # Whether the posting page supports submitting an application directly.
        data.update({
            'directApply': item['directApply']
        })

        # Employment type; one or more values may be supplied.
        if 'employmentType' in item:
            # FULL_TIME / PART_TIME / CONTRACTOR / TEMPORARY / INTERN /
            # VOLUNTEER / PER_DIEM / OTHER
            data.update({
                'employmentType': item['employmentType']
            })

        if 'educationRequirements' in item:
            e_Dict = {
                "educationRequirements": {
                    "@type": "EducationalOccupationalCredential",
                    "credentialCategory": item['educationRequirements']
                    # high school / associate degree / bachelor degree /
                    # professional certificate / postgraduate degree
                },
            }
            data.update(e_Dict)

        if 'experienceRequirements' in item:
            e_Dict = {
                "experienceRequirements": {
                    "@type": "OccupationalExperienceRequirements",
                    "monthsOfExperience": item['experienceRequirements']  # required experience, in months
                },
            }
            data.update(e_Dict)

        # When true, experience alone suffices for candidates lacking the
        # required education; only meaningful if both requirements are set.
        if 'required_only_experience' in item:
            if 'experienceRequirements' in item and 'educationRequirements' in item:
                data.update({
                    'experienceInPlaceOfEducation': item['required_only_experience']
                })

        vacancies_list.append(data)

    return vacancies_list
|
||||
|
||||
1
BaseModels/search_optimization/ya_YML/ya_YML.py
Normal file
1
BaseModels/search_optimization/ya_YML/ya_YML.py
Normal file
@@ -0,0 +1 @@
|
||||
# https://yandex.ru/dev/turbo-shop/doc/quick-start/markets.html
|
||||
1
BaseModels/search_optimization/ya_tips
Normal file
1
BaseModels/search_optimization/ya_tips
Normal file
@@ -0,0 +1 @@
|
||||
https://yandex.ru/support/webmaster/index.html
|
||||
258
BaseModels/seo_text_generators.py
Normal file
258
BaseModels/seo_text_generators.py
Normal file
@@ -0,0 +1,258 @@
|
||||
from BaseModels.inter import cut_to_number_w_point
|
||||
|
||||
|
||||
def generate_seotext_by_properties(product_data_Dict):
    """Compose a Russian HTML SEO text for a product and store it in the dict.

    Scans ``product_data_Dict['properties_w_values_filtred']`` and builds
    sentence fragments for power ('Мощность'), luminous flux ('Световой поток'),
    IP rating ('IP (пылевлагозащита)'), color temperature ('Цветовая
    температура') and mounting type ('Тип монтажа'), preceded by an intro line
    built from product type / article / series.  The combined text is written
    to ``product_data_Dict['seo_text']`` and the same (mutated) dict is
    returned.

    Every section is wrapped in a deliberately broad try/except: a missing or
    malformed property must never abort text generation — the corresponding
    fragment is simply left empty.
    """

    power_txt = ''
    ip_txt = ''
    lm_txt = ''
    temp_txt = ''
    install_txt = ''
    diametr_txt = ''

    # --- Intro line ------------------------------------------------------
    try:
        # The parsed dimension is unused by the live code below (only by the
        # historically removed variants), but the parse is kept on purpose:
        # products without a numeric diameter/width get no intro line, as
        # before.
        if 'diameter' in product_data_Dict:
            int(product_data_Dict['diameter'])
        else:
            int(product_data_Dict['width'])

        diametr_txt = '{0} truEnergy {1} серии {2}.<br>'.format(
            product_data_Dict['product_type']['name'].upper(),
            product_data_Dict['article'],
            product_data_Dict['product_series']['name'].upper()
        )
    except Exception:
        pass

    # `prop`, not `property` — the original name shadowed the builtin.
    for prop in product_data_Dict['properties_w_values_filtred']:

        # --- Power ('Мощность'), watts -----------------------------------
        try:
            if prop['property']['name'] == 'Мощность':
                power = int(prop['property_value'])

                if power < 7:
                    power_txt = ('Обладая низким энергопотреблением, этот {0} является заменой '
                                 'лампочки накаливания мощностью до 40 Ватт.<br>').format(
                        product_data_Dict['product_type']['name'].lower()
                    )
                elif power < 13:
                    # No placeholders — the original called .format() with no
                    # arguments here; the no-op call is dropped.
                    power_txt = ('Энергоэффективность этого устройства позволяет использовть его в местах, '
                                 'где ранее использовались светильники с лампами накаливания мощностью до 75 Ватт.<br>')
                elif power < 19:
                    power_txt = ('Этот {0} мощностью {1} Ватт легко заменит старые лампы накаливания мощностью до 100 Ватт '
                                 'или люминесцентные лампы мощностью до 40 Ватт.<br>').format(
                        product_data_Dict['product_type']['name'].lower(),
                        str(power)
                    )
                elif power < 37:
                    power_txt = ('Данная модель подходит для освещения больших пространств. '
                                 'Она не только поможет решить вопрос освещения, но и существенно сэкономит бюджет, '
                                 'выделенный на решение этой задачи.<br>')
                else:
                    power_txt = ('{0} Ватт, в данной модели обеспечивает мощный световой поток. '
                                 'Это дает возможность установки одного изделия для освещения помещений с большой '
                                 'площадью или открытых пространств.<br>').format(
                        str(power)
                    )
        except Exception:
            pass

        # --- Luminous flux ('Световой поток'), lumens --------------------
        try:
            # Article 11043 is explicitly excluded (original behavior;
            # reason not documented in this file).
            if prop['property']['name'] == 'Световой поток' and product_data_Dict['article'] != '11043':
                val = int(prop['property_value'])

                if product_data_Dict['product_type']['name'] == 'Светильник светодиодный':
                    # 300 lm/m² for work/living areas, 120 lm/m² for utility
                    # rooms (rule-of-thumb constants from the original code).
                    lm_txt = ('Один {0} данной модели способен осветить до {1} м.кв. площади '
                              'для рабочих зон и жилых комнат, и до {2} м.кв. площади для проходных и подсобных помещений '
                              '(при стандартной высоте потолка и нормальной освещенности помещения).<br>').format(
                        product_data_Dict['product_type']['name'].lower(),
                        str(round(val / 300, 2)),
                        str(round(val / 120, 2)),
                    )
        except Exception:
            pass

        # --- IP rating ('IP (пылевлагозащита)') --------------------------
        try:
            if prop['property']['name'] == 'IP (пылевлагозащита)':
                val = int(prop['property_value'])

                if val > 66:
                    ip_txt = ('Максимальная защита IP{0} способна выдержать самые сильные испытания водой. '
                              'Освещение с такой защитой используют для фонтанов и бассейнов.<br>').format(
                        str(val)
                    )
                elif val > 64:
                    # NOTE(review): the original text runs 'влаги' and 'а так
                    # же' together with no separator — kept byte-for-byte.
                    ip_txt = ('Данный продукт имеет высокую степень пылевлагозащиты - IP{0}. В связи с этим данная модель прекрасно подходит как '
                              'для отапливаемых помещений с нормальным уровнем влажности, так и для помещений неотапливаемых, '
                              'а также для эксплуатации на улице. Устройство с данной степенью защиты не боится пыли и влаги'
                              'а так же имеет защиту от струй воды со всех направлений.<br>').format(
                        str(val)
                    )
                elif val > 60:
                    ip_txt = ('Степень защиты IP{0} обозначает полную защиту от брызг с любых сторон и имеет полную пылинепроницаемость '
                              '(никакая пыль не может проникнуть внутрь корпуса устройства). '
                              'Светильники подходят для установки в помещении и на улице, при рабочих температурах -20 до +40 градусов.<br>').format(
                        str(val)
                    )
                elif val > 53:
                    # Original passed two extra, unused format arguments —
                    # dropped; the text is kept byte-for-byte.
                    ip_txt = ('У изделия с степенью защиты IP{0} снижена возможность попадания пыли внутрь корпуса '
                              'и обеспечена полная защита расположенной внутри устройстав электроники.'
                              'Часто используют для рабочих помещений с повышенным содержанием пыли и влаги, а также под навесами.<br>').format(
                        str(val)
                    )
                elif val > 40:
                    # NOTE(review): this is the only branch without a trailing
                    # '<br>' — preserved as-is.
                    ip_txt = 'Могут устанавливаться в помещения с повышенным уровнем пыли.'
                else:
                    ip_txt = ('IP{0} - степень защиты данной модели, в связи с этим могут устанавливаться в'
                              ' отапливаемые помещения с умеренным уровнем влажности.<br>').format(
                        str(val)
                    )
        except Exception:
            pass

        # --- Color temperature ('Цветовая температура'), kelvin ----------
        try:
            if prop['property']['name'] == 'Цветовая температура':
                val = int(prop['property_value'])

                if val < 3001:
                    temp_txt = ('Теплый свет, генерируемый этой моделью способствует отдыху и расслаблению. '
                                'Он приятен для глаз. В связи с этим рекомендуется устанавливать {0} '
                                'с температурой {1}К в зоны отдыха, жилые комнаты и спальни, кафе, лаундж зоны. '
                                'Очень удачное решение для обеденных и гостинных комнат.<br>').format(
                        product_data_Dict['product_type']['name_plural'].lower(),
                        str(val),
                    )
                elif val < 4601:
                    temp_txt = ('Модель обладает нейтральным цветом свечения, который прекрасно подходит и как для жилых помещений и комнат, '
                                'так и для рабочих зон (офисов, кабинетов, производств) . '
                                'Данный свет стимулирует к работе не вызывая перенапряжения глаз и не искажая цветопередачу. '
                                'Универсальное и наиболее распространенное решение.<br>')
                elif val < 7001:
                    temp_txt = ('Цветовая температура {0}К - наиболее оптимально использование в помещениях промышленного назначения, '
                                'административных зданиях, на производствах, складах, гаражах, паркингах. '
                                'Однако могут применяться и в интерьере для создания акцентов в дизайне, '
                                'либо если предпочтения потребителя отданы в пользу белого света. <br>').format(
                        str(val)
                    )
                else:
                    temp_txt = ('От показателя цветовой температуры зависит то, как Вы будут воспринимать предметы и другие объекты освещенные устройством. '
                                'С помощью цветовой температуры можно сделать более приятным отдых и улучшить эффективность работы. '
                                'Отниситесь внимательно к выбору устройства по этому параметру.<br>')
        except Exception:
            pass

        # --- Mounting type ('Тип монтажа') -------------------------------
        try:
            if prop['property']['name'] == 'Тип монтажа':
                val = prop['property_value']

                if val == 'встраиваемый':
                    install_txt = ('Устройство устанавливается в предварительно вырезанное в поверхности отверстие. '
                                   'Этот вариант монтажа используется для подвесных и натяжных потолков, а так же для фальш-стен и ниш.')
                elif val == 'накладной':
                    install_txt = 'Способ крепления - накладной. Значит эта модель может быть закреплена на любую ровную поверхность.'
                elif val == 'встраиваемый/накладной':
                    install_txt = '{0} обладает возможностью монтажа как в отверстия на поверхности плоскостей, так и на любую ровную поверхность.'.format(
                        product_data_Dict['article']
                    )

                # BUGFIX: only append the height note / closing <br> when a
                # mounting sentence was actually produced.  The original
                # appended unconditionally, so an unknown mounting type
                # yielded a stray '<br>' (or a dangling height sentence).
                if install_txt:
                    if 'height_visible_part' in product_data_Dict and product_data_Dict['height_visible_part']:
                        install_txt = install_txt + ' Высота видимой части устройства после монтажа составит {0}мм.<br>'.format(
                            str(round(product_data_Dict['height_visible_part']))
                        )
                    else:
                        install_txt = install_txt + '<br>'
        except Exception:
            pass

    product_data_Dict['seo_text'] = ''.join((
        diametr_txt,
        power_txt,
        lm_txt,
        ip_txt,
        temp_txt,
        install_txt,
    ))

    return product_data_Dict
|
||||
15
BaseModels/templates/m_show_message.html
Normal file
15
BaseModels/templates/m_show_message.html
Normal file
@@ -0,0 +1,15 @@
|
||||
{# Modal message window: icon + caption + message, an optional form fieldset, and action buttons. #}
{# When an `order` is passed in, its id/payType are exposed as data attributes for client-side JS. #}
{# `form_style`, `caption`, `message` and `buttons` are rendered unescaped (|safe) — callers must pass trusted markup. #}
<div class="modal">
    <div class="alert-window"{% if order %} data-order_id="{{ order.id }}" data-pay_type="{{ order.payType }}"{% endif %} style="{{ form_style|safe }}">
        <img class="stat-img" src="/static/img/{{ form_icon }}"/>
        <p class="caption">{{ caption|safe }}</p>
        <p class="message">{{ message|safe }}</p>
        {% if form %}
            <div class="fieldset">
                {% for item in form %}
                    {{ item }}
                {% endfor %}
            </div>
        {% endif %}
        {{ buttons|safe }}
    </div>
</div>
|
||||
1
BaseModels/templatetags/__init__.py
Normal file
1
BaseModels/templatetags/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__author__ = 'SDE'
|
||||
155
BaseModels/templatetags/base_tags_extra.py
Normal file
155
BaseModels/templatetags/base_tags_extra.py
Normal file
@@ -0,0 +1,155 @@
|
||||
__author__ = 'SDE'
|
||||
|
||||
from django import template
|
||||
from django.template.defaultfilters import stringfilter
|
||||
|
||||
register = template.Library()
|
||||
|
||||
from django.core.serializers import serialize
|
||||
from django.db.models.query import QuerySet
|
||||
# import simplejson
|
||||
from django.template import Library
|
||||
from django.utils.html import mark_safe
|
||||
|
||||
@register.filter('get_value_from_dict')
def get_value_from_dict(dict_data, key):
    """Template filter: look up *key* in *dict_data*.

    Usage: ``{{ your_dict|get_value_from_dict:your_key }}``.
    Returns the stored value when the key is present, otherwise ``False``.
    """
    return dict_data[key] if key in dict_data else False
|
||||
|
||||
|
||||
|
||||
@register.filter()
def get_rows_count_by_cols_count(data, cols_count):
    """Template filter: rows needed to lay *data* out in *cols_count* columns.

    Ceiling division — a partial final row still counts as a row.
    """
    full_rows, leftover = divmod(len(data), cols_count)
    return full_rows + 1 if leftover else full_rows
|
||||
|
||||
@register.filter()
def get_numbers_list(from_el, to_el):
    """Template filter: inclusive range of integers from *from_el* to *to_el*."""
    return range(from_el, to_el + 1)
|
||||
|
||||
|
||||
def val_type(value):
    """Return the name of *value*'s concrete type (e.g. ``'int'``, ``'str'``)."""
    return type(value).__name__
|
||||
# Legacy-style registration (pre-decorator API): exposes val_type above as the
# "val_type" template filter.
register.filter('val_type', val_type)
|
||||
|
||||
@register.filter()
def get_cols_table_data_for_row_when_cols3(value, row, el_count=3):
    """Template filter: slice out the *row*-th group of *el_count* items.

    *row* is 1-based.  The hard-coded group size of 3 is now a defaulted
    parameter (backward-compatible; the default matches the filter's
    historical name).  Returns a list, possibly shorter than *el_count* for
    the last row, or empty when *row* is out of range.
    """
    start = (row - 1) * el_count
    return list(value)[start:start + el_count]
|
||||
# register.filter('val_type', val_type)
|
||||
|
||||
|
||||
@register.filter
@stringfilter
def correct_for_tables(value):
    """Template filter: render empty-ish values ('None', '0.0') as a dash."""
    return '-' if value in ('None', '0.0') else value
|
||||
|
||||
|
||||
@register.filter
@stringfilter
def del_bad_symbols(value):
    """Template filter: strip unwanted symbols (delegates to BaseModels.functions.del_bad_symbols)."""
    # Local import aliased to avoid shadowing this filter's own name.
    from BaseModels.functions import del_bad_symbols as _clean
    return _clean(value)
|
||||
|
||||
|
||||
@register.filter
@stringfilter
def del_amp_symbols(value):
    """Template filter: remove non-breaking-space entities (delegates to BaseModels.functions.del_nbsp)."""
    from BaseModels.functions import del_nbsp as _del_nbsp
    return _del_nbsp(value)
|
||||
|
||||
@register.filter
@stringfilter
def del_lang_from_path(value):
    """Template filter: rebuild a URL path without its first '/'-segment.

    BUGFIX: the original did ``path.join(i + '/')`` — ``str.join`` returns a
    new string and the result was discarded, so the filter always returned
    ''.  The accumulation is now explicit.

    NOTE(review): for a path with a leading slash (e.g. '/en/products/x'),
    split element 0 is the empty string, so the ``[1:]`` slice keeps the
    language code; it only drops the language for slash-less values
    ('en/products/x' -> 'products/x/').  The slice is kept as written —
    TODO confirm against callers whether element 1 should be dropped too.
    """
    path_list = value.split('/')
    path = u''
    for segment in path_list[1:]:
        path += segment + '/'
    return path
|
||||
|
||||
@register.filter
@stringfilter
def get_color_by_number(value, arg=None):
    """Template filter: pick an inline CSS color style for a numeric value.

    With ``arg == u'%'`` the value is bucketed as a percentage
    (> 50 green, shading down to red at/below 0); otherwise it is bucketed
    on an absolute scale (> 1000 green, down to red at/below 50).
    Returns a string like ``style="color: green;"``, or the empty string
    when *value* cannot be parsed as a float.
    """

    color = None
    try:
        val = float(value)

        # Percentage scale.  The comparison chains overlap at their
        # boundaries (e.g. exactly 50 satisfies two conditions); the first
        # matching branch wins.
        if not color and arg == u'%':

            color = u'black'
            if val > 50:
                color = u'green'
            elif val <= 50 and val >= 25:
                color = u'#6c8107'
            elif val <= 25 and val >= 10:
                color = u'#a89803'
            elif val <= 10 and val >= 5:
                color = u'#e6a707'
            elif val <= 5 and val >= 0:
                color = u'#e67307'
            elif val <= 0:
                color = u'red'

        # Disabled experiment: continuous red->green gradient computed from a
        # min/max range instead of fixed buckets.  Kept for reference.
        # val_range = val_max - val_min
        # # val_percent = (val_range * 100 / val) - 100
        # offset = -(val_min + -(val))
        # if val <0:
        #     val = offset
        # if val > val_max:
        #     val = val_max
        # elif val < 0:
        #     val = 0
        #
        # color_range = 16711680 - 1211136
        # val_1unit = float(color_range) / float(val_range)
        # dec_color = 16711680 - int(val_1unit * val)

        # Absolute scale: only reached when the '%' branch did not run
        # (color is still None).
        if not color:
            color = u'black'
            if val > 1000:
                color = u'green'
            elif val <= 1000 and val >= 500:
                color = u'#6c8107'
            elif val <= 500 and val >= 250:
                color = u'#a89803'
            elif val <= 250 and val >= 125:
                color = u'#e6a707'
            elif val <= 125 and val >= 50:
                color = u'#e67307'
            elif val <= 50:
                color = u'red'

        # s = u'style="color: #{0}12;"'.format(str(hex(dec_color))[2:6])
        s = u'style="color: {0};"'.format(color)
        return s
    except:
        # Non-numeric input: emit no style at all.  NOTE(review): bare
        # except kept byte-for-byte; it also swallows unexpected errors.
        return u''
|
||||
|
||||
|
||||
# @register.filter
|
||||
# @stringfilter
|
||||
# def check_aprox_compare_strings(search_phrase, txt):
|
||||
# from ProductApp.search import get_highlight_string
|
||||
#
|
||||
# s = get_highlight_string(search_phrase, txt)
|
||||
#
|
||||
# return s
|
||||
|
||||
Reference in New Issue
Block a user