commit c17da7eaab3c19931057a00c58eb688e854c7e71 Author: SDE Date: Tue May 16 17:14:16 2023 +0300 init diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6887489 --- /dev/null +++ b/.gitignore @@ -0,0 +1,415 @@ +### JetBrains+all template +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +.idea/ + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### JetBrains template +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### Django template +*.log +*.pot +*.pyc +__pycache__/ +local_settings.py +db.sqlite3 +db.sqlite3-journal +media + +# If your build process includes running collectstatic, then you probably don't need or want to include staticfiles/ +# in your Git repository. Update and uncomment the following line accordingly. +# /staticfiles/ + +### JetBrains+iml template +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should exclude module files, +# since they will be recreated, and may cause churn. 
Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# SonarLint plugin +.idea/sonarlint/ + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### Python template +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + diff --git a/AuthApp/__init__.py b/AuthApp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/AuthApp/admin.py b/AuthApp/admin.py new file mode 100644 index 0000000..6570857 --- /dev/null +++ b/AuthApp/admin.py @@ -0,0 +1,253 @@ +# coding=utf-8 +from django.contrib import admin +from django.contrib.auth.admin import UserAdmin +from BaseModels.admin_utils import * +from django.contrib.auth.models import User +from django.utils.translation import gettext_lazy as _ + +from AuthApp.models import * + +from django.contrib.auth.models import Group +from django.db import models +from django.contrib.admin.models import LogEntry + +from django.db.models import F, Value as V +from django.db.models.functions import Concat + +from django.contrib.auth.admin import GroupAdmin as BaseGroupAdmin +from django.contrib.auth.models import Group +from django.contrib.admin import SimpleListFilter + + +class LogEntryAdmin(admin.ModelAdmin): + # pass + list_display = ( + '__str__', 'action_time', 'user', 'content_type', 'object_id', 'object_repr', 'action_flag', 
# Replacement admin for Django's built-in User model.
class Admin_User(UserAdmin):
    """Stock ``UserAdmin``; the custom columns, inline profile and bulk
    actions are currently disabled (kept as commented-out history)."""

    # Duplicate the save buttons at the top of long change forms.
    save_on_top = True


# Swap the default registration for our subclass.
admin.site.unregister(User)
admin.site.register(User, Admin_User)
company_obj__id__in=companies_ids + # # ).annotate( + # # lead_source = F('company_obj__lead_source') + # ) + # return queryset + # + # def get_list_display_links(self, request, list_display): + # res = super(Admin_UserProfile, self).get_list_display_links(request, list_display) + # if not request.user.is_superuser: # and not request.user.has_perm('AuthApp.change_userprofile'): + # return None + # return res + # + # def get_changelist_instance(self, request): + # if not request.user.is_superuser: # and not request.user.has_perm('AuthApp.change_userprofile'): + # self.list_editable = ['birthdate'] + # return super(Admin_UserProfile, self).get_changelist_instance(request) + # + # def user_name(self, obj): + # return '{0} {1}'.format(obj.user.last_name, obj.user.first_name) + # + # user_name.short_description = u'Имя' + # + # def lead_source(self, obj): + # res = None + # if obj.company_obj: + # res = obj.company_obj.lead_source + # if not res: + # res = '-' + # else: + # res = obj.company_obj.get_lead_source_display() + # return res + # + # lead_source.short_description = 'Источник' + # lead_source.admin_order_field = 'company_obj__lead_source' + # + # def manager(self, obj): + # if not obj.company_obj or not obj.company_obj.manager_obj: + # return '-' + # + # return '{0}'.format(obj.company_obj.manager_obj.get_full_name()) + # + # manager.short_description = u'Менеджер' + # + # fieldsets = ( + # (None, { + # 'classes': ['wide'], + # 'fields': ( + # 'user', 'enable', 'account_type', + # ('discount',), + # ('work_start_D', 'work_finish_D'), + # ('company_obj', 'company', 'office', 'company_position', 'departament'), + # ('delivery_address'), + # ('days_to_order_cancellation_default', 'days_to_pay_default', 'pay_terms'), + # ('authMailCode', 'document_sign_person'), + # ('birthdate'), + # ('connected_mailings',), + # 'creator' + # ) + # }), + # ('1С', { + # 'classes': ['wide'], + # 'fields': ( + # ('id_1s', 'name',), + # ) + # }), + # ) + # + # save_on_top = True + # + 
# coding=utf-8
from BaseModels.api.base_api_views import *
from ..models import *
from rest_framework.response import Response
from BaseModels.mailSender import techSendMail
import json
from BaseModels.api.base_api_permissions import *
from datetime import datetime
import re
from rest_framework import status
from BaseModels.api.base_api_serializers import Import_Pocket_Srializer


class Managers_1C_get_timestamp(APIBaseSimplaClass):
    """GET endpoint for 1C: return the timestamp of the last accepted
    ``1S_managers`` import package, so 1C can decide whether to resend."""

    permission_classes = (api_1C_perm,)
    serializer_class = Import_Pocket_Srializer

    def get(self, request):
        # Local import to avoid a circular module dependency at load time.
        from GeneralApp.views import get_timestamp_by_property_item_name

        property_item_name = u'1S_managers'
        last_timestamp = get_timestamp_by_property_item_name(property_item_name)

        return Response({
            'property_item_name': property_item_name,
            'timestamp': last_timestamp,
        })


class Managers_1C_import(APIBaseSimplaClass):
    """POST endpoint for 1C: accept a managers data package and import it.

    On any unexpected failure the exception is mailed to the 1C problem
    solvers list and a 400 response is returned to the caller.
    """

    permission_classes = (api_1C_perm,)
    serializer_class = Import_Pocket_Srializer

    def post(self, request, format=None):
        """Import a 1C managers/companies data package."""
        log = ''
        res = ''

        try:
            data = request.data

            if not data:
                msg = 'нет данных в пакете'
                res_Dict = {
                    'status': 'error',
                    'error': msg,
                }
                return Response(res_Dict, status=status.HTTP_400_BAD_REQUEST)

            # Local import: import_1C_data pulls in models/mail helpers.
            from .import_1C_data import import_1C_pocket
            res = import_1C_pocket(data)

            res_Dict = {
                'status': 'finished',
                'log': log,
            }
            res_Dict.update(res)

            return Response(res_Dict)

        except Exception as e:
            # Boundary handler: report by mail, answer 400.  request.data may
            # itself be unavailable, so guard the len() call.
            len_data = len(request.data) if request.data else 0
            title = 'ОШИБКА tE Managers_1C_import'
            # FIX: "lenght" -> "length" typo in the diagnostic mail body.
            msg = 'Managers_1C_import Error = {0}({1})\nlength data = {2}\nlog...\n{3}'.format(
                str(e), str(e.args), str(len_data), str(res)
            )
            from BaseModels.mailSender import techSendMail_for_specified_email_list
            from tEDataProj.inter import problem_solvers_personal_1S
            techSendMail_for_specified_email_list(msg, problem_solvers_personal_1S, title=title)

            res_Dict = {
                'status': 'error',
                'error': str(e),
            }
            return Response(res_Dict, status=status.HTTP_400_BAD_REQUEST)
from django.conf.urls import url, include
from rest_framework import routers

from .api_1C_views import *
from .v1.user.user_api_views import *

# DRF router for the versioned viewsets.
router = routers.SimpleRouter()
router.register(r'v1/managers', v1_Managers_ViewSet)
router.register(r'v1/personal', v1_Personal_ViewSet)

# Plain APIView endpoints used by the 1C integration.
extra_urlpatterns = [
    url(r'^1c/import_managers_data$', Managers_1C_import.as_view()),
    url(r'^1c/import_managers/get_last_timestamp$', Managers_1C_get_timestamp.as_view()),
]

urlpatterns = router.urls + extra_urlpatterns
def get_buttons_states_Dict(user):
    """Map UI element identifiers to whether *user* may see or use them.

    Returns a dict of ``element_name -> bool`` driven entirely by the
    user's Django permissions.
    """
    perm_by_element = {
        'managers_menu': 'AuthApp.UI_managers_show',
        'managers_add_but': 'AuthApp.UI_managers_create',
        'managers_change_but': 'AuthApp.UI_managers_modify',
        'managers_delete_but': 'AuthApp.UI_managers_delete',
        # "Administration" menu group and its "Personnel" entry.
        'admin_group_menu': 'AuthApp.UI_adminGroup_show',
        'personal_show': 'AuthApp.UI_personal_show',
    }
    return {element: user.has_perm(perm) for element, perm in perm_by_element.items()}
# coding=utf-8
from BaseModels.mailSender import techSendMail

from ..models import *
from datetime import date, datetime
from GeneralApp.views import get_timestamp_by_property_item_name, set_timestamp_by_propertiy_item_name
from BaseModels.error_processing import *
from uuid import uuid1
from GeneralApp.temp_data_funcs import add_tmp_data, del_tmp_data_by_obj

from django.http import HttpResponse
import json
import re


def avg(val):
    """Arithmetic mean of *val*, using floating-point division.

    NOTE: raises ZeroDivisionError for an empty sequence (unchanged
    historical behaviour — callers rely on the exception).
    """
    return sum(val) / float(len(val))


def import_1C_pocket_from_file(request):
    """Superuser-only debug entry point: import a managers package from the
    local dump file ``companies_1s.txt`` instead of an HTTP payload."""
    if not request.user.is_superuser:
        return HttpResponse(u'import_1C_pocket_from_file PERMISSION FAIL')

    try:
        # FIX: the original opened the file without ever closing it
        # (resource leak) and used a bare ``except``.
        with open('companies_1s.txt') as f:
            data = f.read()
    except OSError:
        return HttpResponse(u'import_1C_pocket_from_file READ FILE FAIL')

    # Collapse all whitespace so the dump parses as a single JSON document.
    data = re.sub(r'[\r\n\t]', ' ', data)
    data = re.sub(r'\s+', ' ', data)

    data = json.loads(data)

    import_1C_pocket(data)

    return HttpResponse(u'import_1C_pocket_from_file Accept')


def import_1C_pocket(json_data):
    """Import a 1C "managers" package: create/update ``User`` +
    ``UserProfile`` rows from the package items.

    *json_data* is either ``{'data_list': [...], 'timestamp': ...}`` (new
    format, with staleness protection) or a bare list of items (legacy
    format).  Returns a dict with a ``result`` log; the same log is mailed
    to the tech address.
    """
    log = ''
    log_begin_DT = datetime.now()
    msg = 'import_1C_pocket MANAGERS start - {0}\n---------------\n\n'.format(str(log_begin_DT))
    log = '{0}\n{1}'.format(log, msg)

    try:
        data = json_data['data_list']
        timestamp = json_data['timestamp']
        msg = str(timestamp)
        log = '{0}\n{1}'.format(log, msg)

        saved_timestamp = get_timestamp_by_property_item_name('1S_managers')

        # Reject packages older than (or equal to) the last accepted one.
        if saved_timestamp and saved_timestamp >= timestamp:
            msg = '!!!!! --- 1S_managers пакет устарел, импорт не был произведен'
            print(msg)
            return {u'result': u'1S_managers пакет устарел, импорт не был произведен', 'error': 304}
        set_timestamp_by_propertiy_item_name('1S_managers', timestamp)
    except (KeyError, TypeError):
        # FIX: was a bare ``except`` — only the "legacy bare list" shape
        # should fall through here; real errors must propagate.
        data = json_data

    # Persist the raw payload temporarily so a crash can be replayed.
    tmp_data = add_tmp_data(data_type='import_proc', data_target='managers_1s_import', data=data)

    for item in data:
        json_item = json.dumps(item, ensure_ascii=False)

        try:
            if not u'id' in item:
                msg = '!!!!! --- 1S_managers ID отсутствует в экзепляре данных'
                log = '{0}\n{1}'.format(log, msg)
                # FIX: the early return used to leak the temp-import row.
                if tmp_data:
                    del_tmp_data_by_obj(tmp_data)
                return {
                    u'result': msg,
                    u'error': 400,
                }

            msg = '{0} - {1}'.format(item[u'id'], item[u'name'])
            log = '{0}\n{1}'.format(log, msg)

            user_profiles = UserProfile.objects.filter(id_1s=item[u'id'])

            kwargs = {
                'name': item[u'name'].replace(u"'", '"'),
                'id_1s': item[u'id'],
                'company_position': item[u'position'],
                'departament': str(item[u'subdiv']),
            }

            # 1C sends dates as "dd.mm.yyyy"; anything shorter (blank /
            # padded) is treated as "not set".
            work_start_D = item[u'datein'].replace(u" ", '')
            if len(work_start_D) > 9:
                kwargs.update({'work_start_D': datetime.strptime(work_start_D, "%d.%m.%Y")})

            work_finish_D = item[u'dateout'].replace(u" ", '')
            if len(work_finish_D) > 9:
                kwargs.update({'work_finish_D': datetime.strptime(work_finish_D, "%d.%m.%Y")})

            birthday = item[u'birthday'].replace(u" ", '')
            if len(birthday) > 9:
                kwargs.update({'birthdate': datetime.strptime(birthday, "%d.%m.%Y")})

            user = None
            u_profile = None
            if user_profiles:
                # Existing manager: bulk-update the profile fields.
                user_profiles.update(**kwargs)
                u_profile = user_profiles[0]
                msg = ' - ОБНОВЛЕНИЕ данных МЕНЕДЖЕРА - {0}'.format(str(u_profile.__dict__))
                user = u_profile.user

            if not user:
                # New manager: create a staff User with a throwaway password
                # (login name is the 1C id, mail is synthesized).
                username = str(item[u'id'])
                mail = '{0}@truenergy.by'.format(str(item[u'id']))
                # FIX: dropped the unused ``user_id`` alias and the redundant
                # second ``set_password`` (create_user already hashed it).
                password = str(uuid1().hex)[:10]
                user = User.objects.create_user(username=username, email=mail, password=password)
                user.is_staff = True
                user.is_active = True
                user.is_superuser = False
                user.save()

                kwargs.update({'user': user})

                user_profiles = UserProfile.objects.filter(user=user)
                user_profiles.update(**kwargs)
                u_profile = user_profiles[0]

                msg = ' - СОЗДАНИЕ МЕНЕДЖЕРА - {0}'.format(str(u_profile.__dict__))

            log = '{0}\n{1}'.format(log, msg)

            # Sync the User row itself: deactivate fired managers, refresh
            # first/last name from the 1C full name.
            msg = ''
            if 'work_finish_D' in kwargs and user.is_active:
                user.is_active = False
                # FIX: the original overwrote ``msg`` with ``=`` on each
                # branch, so only the LAST change made it into the log;
                # accumulate with ``+=`` instead.
                msg += ' - отключен доступ\n'
            name_list = item[u'name'].split(' ')
            if len(name_list) > 1 and user.first_name != ' '.join(name_list[1:]):
                user.first_name = ' '.join(name_list[1:])
                msg += ' - изменено имя\n'
            if len(name_list) > 0 and user.last_name != name_list[0]:
                user.last_name = name_list[0]
                msg += ' - изменена фамилия'

            if msg:
                user.save()
                log = '{0}\n{1}'.format(log, msg)

        except Exception as e:
            # Per-item boundary: one bad record must not abort the package.
            msg = '!!!!! --- import_1C_pocket MANAGERS error={0}'.format(str(e))
            print(msg)
            log = '{0}\n{1}'.format(log, msg)

    # Drop the temporary payload copy now that the import finished.
    if tmp_data:
        del_tmp_data_by_obj(tmp_data)

    msg = 'import_1C_package MANAGERS finish - {0} (processing time = {1}\n---------------\n\n'.format(
        str(datetime.now()),
        str(datetime.now() - log_begin_DT)
    )
    log = '{0}\n{1}'.format(log, msg)

    title = 'import_1C_pocket MANAGERS'
    techSendMail(log, title)

    return {
        u'result': log
    }
UserProfile_list_Serializer(serializers.ModelSerializer): + class Meta: + model = UserProfile + fields = ( + 'id', + # 'name' + ) + + +# ---------------------------------------- \ No newline at end of file diff --git a/AuthApp/api/v1/permissions/personal_api_permissions.py b/AuthApp/api/v1/permissions/personal_api_permissions.py new file mode 100644 index 0000000..efa44ea --- /dev/null +++ b/AuthApp/api/v1/permissions/personal_api_permissions.py @@ -0,0 +1,116 @@ +# coding=utf-8 +from rest_framework.permissions import BasePermission +from rest_framework.exceptions import PermissionDenied +from tEDataProj.inter import check_user_key_inter +from AuthApp.models import User + + +def check_of_user_is_manager_of_company(user, view): + if not 'pk' in view.kwargs: + return False + + try: + objs = User.objects.get( + user_profile__company_obj__manager_obj=user, + id=view.kwargs['pk'] + ) + except: + return False + + return objs + + +def check_of_user_is_company_staff(user, view): + if not 'pk' in view.kwargs: + return False + + try: + objs = User.objects.get( + user_profile__company_obj=user.user_profile.company_obj, + id=view.kwargs['pk'] + ) + except: + return False + + return objs + + +class Personal_API_perms(BasePermission): + """ + Allows access only users w full access. 
+ """ + + def has_permission(self, request, view): + + if not request.user or request.user.is_anonymous or not request.user.is_active: + return False + + # auth_data = request.query_params + # if not check_user_key_inter(auth_data): + # raise PermissionDenied(code=403) + + user = request.user + groups = user.groups.all() + + groups_name_list = groups.values_list('name', flat=True) + + if u'API 1С импорт' in groups_name_list: + return False + + if view.basename == u'user': + + if view.action in ('create',): + perm = user.has_perm('AuthApp.UI_company_staff_create') + return perm + + if view.action in ( + 'update', 'partial_update', 'add_communication_item', 'get_connected_mailings', 'possible_mailings', + 'change_mailing_status'): + # perm = user.has_perm('AuthApp.UI_managers_modify') + # return perm + if not user.is_staff: + # персонал компании + if user.has_perm('AuthApp.UI_company_staff_modify_if_staff_company'): + return check_of_user_is_company_staff(user, view) + + return False + + # если персонал + else: + if check_of_user_is_manager_of_company(user, view) and user.has_perm( + 'AuthApp.UI_company_staff_modify_if_manager'): + return True + elif user.has_perm('AuthApp.UI_company_staff_modify_any'): + return True + + if view.action in ('destroy',): + perm = user.has_perm('AuthApp.UI_company_staff_delete') + return perm + + if view.action in ( + 'retrieve', 'list', 'list_by_company_id', 'list_by_office_id', + 'possible_departaments_list', 'possible_company_positions_list' + ): + perm = user.has_perm('AuthApp.UI_company_staff_retrieve_any_no_staff') + if not perm: + perm = user.has_perm('AuthApp.UI_company_staff_retrieve') + return perm + + # if view.basename == u'userprofile': + # + # if view.action in ('create',): + # perm = user.has_perm('AuthApp.add_userprofile') + # return perm + # + # if view.action in ('update', 'partial_update'): + # perm = user.has_perm('AuthApp.change_userprofile') + # return perm + # + # if view.action in ('destroy',): + # perm = 
user.has_perm('AuthApp.delete_userprofile') + # return perm + # + # if view.action in ('retrieve', 'list'): + # return True + + return False diff --git a/AuthApp/api/v1/user/user_api_serializars.py b/AuthApp/api/v1/user/user_api_serializars.py new file mode 100644 index 0000000..0d7d99d --- /dev/null +++ b/AuthApp/api/v1/user/user_api_serializars.py @@ -0,0 +1,124 @@ +from rest_framework import serializers +from ....models import * +from ....funcs import fullname_for_user + + +class Personal_change_mailing_status_Serializer(serializers.Serializer): + mailing_ID = serializers.IntegerField() + mailing_status = serializers.BooleanField() + + +class User_sync_Serializer(serializers.ModelSerializer): + id_1s = serializers.SerializerMethodField() + + def get_id_1s(self, obj): + return obj.user_profile.id_1s + + class Meta: + model = User + fields = ( + 'id_1s', + ) + + +class Profile_list_Serializer(serializers.ModelSerializer): + from GeneralApp.api.v1.communications.communications_api_serializers import Communications_create_Serializer + + office_name = serializers.SerializerMethodField(required=False) + company_name = serializers.SerializerMethodField(required=False) + manager_name = serializers.SerializerMethodField(required=False) + company_client_type = serializers.SerializerMethodField(required=False) + + communications = Communications_create_Serializer(many=True) + + def get_company_client_type(self, obj): + try: + if obj and obj.company_obj: + return obj.company_obj.client_type + else: + return None + except: + return None + + def get_office_name(self, obj): + try: + if obj and obj.office: + return obj.office.name + except: + return None + + return None + + def get_company_name(self, obj): + try: + if obj and obj.company_obj: + return obj.company_obj.name + except: + return None + + return None + + def get_manager_name(self, obj): + try: + if obj and obj.company_obj and obj.company_obj.manager_obj: + return '{0} 
{1}'.format(obj.company_obj.manager_obj.last_name, obj.company_obj.manager_obj.first_name) + except: + return None + + return None + + class Meta: + model = UserProfile + fields = ( + 'id', + 'enable', + 'company_obj', 'company_name', + 'company_position', 'company_client_type', + 'phone', + 'delivery_address', + 'office', 'office_name', + 'departament', + 'document_sign_person', + 'work_start_D', + 'work_finish_D', + 'birthdate', + 'comment', + 'communications', + 'priority_connect_type', + 'modifiedDT', + 'mailing_sets', + 'manager_name' + ) + + +class User_list_Serializer(serializers.ModelSerializer): + full_name = serializers.SerializerMethodField('get_full_name_user', required=False) + + # user_profile = Profile_list_Serializer() + + def get_full_name_user(self, obj): + name = fullname_for_user(obj) + if not name: + name = obj.email + return name + + class Meta: + model = User + fields = ( + 'id', 'full_name' + ) + + +class Personal_list_Serializer(User_list_Serializer): + user_profile = Profile_list_Serializer() + + class Meta: + model = User + fields = ( + 'id', 'full_name', 'first_name', 'last_name', 'is_active', 'is_staff', 'user_profile', 'email' + ) + extra_kwargs = { + 'email': {'required': 'False'}, + 'first_name': {'required': 'False'}, + 'last_name': {'required': 'False'}, + } diff --git a/AuthApp/api/v1/user/user_api_views.py b/AuthApp/api/v1/user/user_api_views.py new file mode 100644 index 0000000..f15e014 --- /dev/null +++ b/AuthApp/api/v1/user/user_api_views.py @@ -0,0 +1,1180 @@ +from BaseModels.api.base_api_views import * +from .user_api_serializars import * +from ...api_permissions import Auth_API_perms +from datetime import datetime +from rest_framework.decorators import action +from django.db.models import Sum, FloatField, F, Value as V, Q, Count +from django.db.models.functions import Concat +import json +from BaseModels.mailSender import techSendMail +from operator import itemgetter +from rest_framework import status +from 
CompaniesApp.models import Company, DeliveryData +from BaseModels.api.api_inter import check_and_get_specific_output_format +from django.contrib.contenttypes.models import ContentType +from ..permissions.personal_api_permissions import Personal_API_perms + +from B2BApp.funcs import pay_terms_for_report_list + + +def get_kwargs_by_get_query_params(request): + kwargs = {} + stat_kwargs = {} + + if not request.user.has_perm('AuthApp.UI_managers_all_stat'): + kwargs.update({'id': request.user.id}) + + if not request.query_params: + return kwargs, stat_kwargs + else: + + try: + + params = request.query_params + + for param_key, param_val in params.items(): + + if 'for_managers_stat_DT_from' == param_key: + # param_key = 'orders_for_manager__delivery_DT__gte' + param_key = 'delivery_DT__gte' + kwargs.update({ + param_key: datetime.strptime(param_val, '%Y-%m-%d %H:%M') + }) + elif 'for_managers_stat_DT_to' == param_key: + # param_key = 'orders_for_manager__delivery_DT__lte' + param_key = 'delivery_DT__lte' + kwargs.update({ + param_key: datetime.strptime(param_val, '%Y-%m-%d %H:%M') + }) + + elif '_D_from' in param_key: + param_key = param_key.replace('_from', '__gte') + kwargs.update({ + param_key: datetime.strptime(param_val, '%Y-%m-%d') + }) + elif '_D_to' in param_key: + param_key = param_key.replace('_to', '__lte') + kwargs.update({ + param_key: datetime.strptime(param_val, '%Y-%m-%d') + }) + + elif param_key == 'regions': + param_val = json.loads(param_val) + kwargs.update({ + 'company__region__in'.format(param_key): param_val + }) + + + elif 'rel_products_in_order_for_product__order__delivery_DT_from' in param_key: + + # param_key = 'orders_for_manager__delivery_DT__gte' + param_key = 'delivery_DT__gte' + + kwargs.update({ + + param_key: datetime.strptime(param_val, '%Y-%m-%d %H:%M') + + }) + + elif 'rel_products_in_order_for_product__order__delivery_DT_to' in param_key: + + param_key = 'orders_for_manager__delivery_DT__lte' + # param_key = 'delivery_DT__lte' + + 
kwargs.update({ + + param_key: datetime.strptime(param_val, '%Y-%m-%d %H:%M') + + }) + + # only_stat_kwargs + elif '_from' in param_key: + param_key = param_key.replace('_from', '__gte') + stat_kwargs.update({ + param_key: param_val + }) + elif '_to' in param_key: + param_key = param_key.replace('_to', '__lte') + stat_kwargs.update({ + param_key: param_val + }) + # --------------------- + + elif param_key in ('pay_terms', 'status'): + param_val = json.loads(param_val) + kwargs.update({ + '{0}__in'.format(param_key): param_val + }) + + elif param_key in ['manager_obj', '']: + kwargs.update({ + '{0}__id'.format(param_key): param_val + }) + + elif param_key in ['series_name']: + param_val = json.loads(param_val) + kwargs.update({ + # 'orders_for_manager__rel_products_in_order_for_order__product__product_series__id__in': param_val + 'rel_products_in_order_for_order__product__product_series__id__in': param_val + }) + + elif param_key in ['brand_name']: + param_val = json.loads(param_val) + kwargs.update({ + # 'orders_for_manager__rel_products_in_order_for_order__product__brand__id__in': param_val + 'rel_products_in_order_for_order__product__brand__id__in': param_val + }) + elif param_key == 'enable': + param_val = json.loads(param_val) + kwargs.update({ + # 'orders_for_manager__rel_products_in_order_for_order__product__enable': param_val + 'rel_products_in_order_for_order__product__enable': param_val + }) + + except Exception as e: + msg = 'Ошибка разбора параметров фильтра
{0}({1})
{2}'.format( + str(e), + str(e.args), + str(request.query_params) + ) + print(msg) + title = 'ОШИБКА tE AuthApp get_kwargs_by_get_query_params' + techSendMail(msg, title) + + return kwargs, stat_kwargs + + +def get_sales_for_managers_by_goods(user, objs, ord_kwargs, receipts_kwargs, stat_kwargs=None): + objs_ids = objs.values_list('id', flat=True) + objs_ids = list(objs_ids) + + from B2BApp.models import ProductInReceipt, ProductInOrder + from django.db.models import Sum, FloatField, F + + prod_in_orders = ProductInOrder.objects.filter( + order__status_shipment='shipped', + order__defective_goods=False, + order__manager_obj__id__in=objs_ids, + **ord_kwargs + # ).exclude( + # order__pay_terms='no_pay' + ) + + allow_values_in_orders = [ + 'id', 'sales_count', + # 'paid_sum', + 'order__manager_obj__id' + ] + + allow_values_in_receipts = [ + 'id', 'receipts_count', + 'receipt__company__manager_obj__id' + ] + + if user.has_perm('B2BApp.UI_show_summ_for_sales'): + allow_values_in_orders.append('sales_sum') + allow_values_in_receipts.append('receipts_sum') + + prod_in_orders = prod_in_orders.annotate( + sales_count=F('count'), + sales_sum=F('product_sum_byn'), + # paid_sum=F('order__paid_sum_byn') + ).values( + *allow_values_in_orders + ).order_by('order__manager_obj__id') + + prod_in_orders = list(prod_in_orders) + + prod_in_receipts = ProductInReceipt.objects.filter( + receipt__receipt_type='return', + receipt__company__manager_obj__id__in=objs_ids, + **receipts_kwargs + ) + + prod_in_receipts = prod_in_receipts.annotate( + receipts_count=Sum('count', + output_field=FloatField()), + receipts_sum=Sum('product_sum_byn'), + ).values( + *allow_values_in_receipts + ).order_by('receipt__company__manager_obj__id') + + prod_in_receipts = list(prod_in_receipts) + + objs = list(objs) + objs.sort(key=itemgetter('full_name'), reverse=False) + + objs_i = 0 + while objs_i < len(objs): + obj = objs[objs_i] + obj['sales_count'] = 0 + obj['sales_sum'] = 0 + # obj['paid_sum'] = 0 + i = 0 
+ while i < len(prod_in_orders): + if obj['id'] == prod_in_orders[i]['order__manager_obj__id']: + obj['sales_count'] += prod_in_orders[i]['sales_count'] + if 'sales_sum' in prod_in_orders[i]: + obj['sales_sum'] += prod_in_orders[i]['sales_sum'] + # obj['paid_sum'] += prod_in_orders[i]['paid_sum'] + del prod_in_orders[i] + # break + else: + i += 1 + + i = 0 + while i < len(prod_in_receipts): + if obj['id'] == prod_in_receipts[i]['receipt__company__manager_obj__id']: + obj['sales_count'] -= prod_in_receipts[i]['receipts_count'] + if 'receipts_sum' in prod_in_receipts[i]: + obj['sales_sum'] -= prod_in_receipts[i]['receipts_sum'] + + del prod_in_receipts[i] + # break + else: + i += 1 + + if stat_kwargs: + if stat_kwargs and 'sales_count__gte' in stat_kwargs and stat_kwargs['sales_count__gte'] and objs[objs_i][ + 'sales_count'] < float(stat_kwargs['sales_count__gte']): + del objs[objs_i] + elif stat_kwargs and 'sales_count__lte' in stat_kwargs and stat_kwargs['sales_count__lte'] and objs[objs_i][ + 'sales_count'] > float(stat_kwargs['sales_count__lte']): + del objs[objs_i] + elif stat_kwargs and 'sales_sum__gte' in stat_kwargs and stat_kwargs['sales_sum__gte'] and objs[objs_i][ + 'sales_sum'] < float(stat_kwargs['sales_sum__gte']): + del objs[objs_i] + elif stat_kwargs and 'sales_sum__lte' in stat_kwargs and stat_kwargs['sales_sum__lte'] and objs[objs_i][ + 'sales_sum'] > float(stat_kwargs['sales_sum__lte']): + del objs[objs_i] + + else: + objs[objs_i]['sales_count'] = round(objs[objs_i]['sales_count'], 2) + objs[objs_i]['sales_sum'] = round(objs[objs_i]['sales_sum'], 2) + + objs_i += 1 + else: + objs_i += 1 + + return objs + + +def get_sales_for_managers_by_orders(objs, ord_kwargs, receipts_kwargs, pay_kwargs, stat_kwargs=None): + from datetime import date, timedelta + from collections import OrderedDict + + chart_data_Dict = {} + + try: + + from_date = None + to_date = None + + objs_ids = objs.values_list('id', flat=True) + objs_ids = list(objs_ids) + + from 
B2BApp.models import Order, ReceiptDocument, Pay + orders = Order.objects.filter( + status_shipment='shipped', + defective_goods=False, + manager_obj__id__in=objs_ids, + **ord_kwargs + ) + + if orders: + from_date = orders.order_by('delivery_DT')[0].delivery_DT.date() + to_date = orders.order_by('-delivery_DT')[0].delivery_DT.date() + + orders = orders.annotate( + sales_count=Sum('rel_products_in_order_for_order__count', + output_field=FloatField()), + sales_sum=F('order_sum_byn'), + paid_sum=F('paid_sum_byn') + ).values( + 'id', 'sales_count', 'sales_sum', 'delivery_DT', + 'paid_sum', 'pay_type', + 'manager_obj__id', 'company__name' + ).order_by('manager_obj__id') + + orders = list(orders) + + receipts = ReceiptDocument.objects.filter( + receipt_type='return', + manager_obj__id__in=objs_ids, + **receipts_kwargs + ) + + if receipts: + from_date_tmp = receipts.order_by('receipt_D')[0].receipt_D + if not from_date or from_date_tmp < from_date: + from_date = from_date_tmp + to_date_tmp = receipts.order_by('-receipt_D')[0].receipt_D + if not to_date or to_date_tmp > to_date: + to_date = to_date_tmp + + receipts = receipts.annotate( + receipts_count=Sum('rel_products_in_receipt_for_receipt__count', + output_field=FloatField()), + receipts_sum=Sum('rel_products_in_receipt_for_receipt__product_sum_byn'), + ).values( + 'id', 'receipts_count', 'receipts_sum', 'manager_obj__id', 'pay_type', 'receipt_D' # , 'company__name' + ).order_by('manager_obj__id') + + receipts = list(receipts) + + pays = Pay.objects.filter( + manager__id__in=objs_ids, + **pay_kwargs + ) + + if pays: + from_date_tmp = pays.order_by('oper_D')[0].oper_D + if not from_date or from_date_tmp < from_date: + from_date = from_date_tmp + to_date_tmp = pays.order_by('-oper_D')[0].oper_D + if not to_date or to_date_tmp > to_date: + to_date = to_date_tmp + + pay_objs = pays.values( + 'id', 'oper_D', 'sum_pay_byn', 'manager__id', + 'shipment_doc__id', 'shipment_doc__rel_orders_for_documents', + 
'shipment_doc__rel_orders_for_documents__manager_obj__id', + 'company__id', 'bank_doc_D' + ).order_by('oper_D', 'company__name') + # ).order_by('company__manager_obj__id', 'oper_D') + + pays = list(pay_objs) + + # from CompaniesApp.models import Document + # its = Pay.objects.filter(documents__id=30069) + + objs = list(objs) + objs.sort(key=itemgetter('full_name'), reverse=False) + + if from_date: + from_DT = from_date + else: + from_DT = date(year=2000, month=1, day=1) + + if to_date: + to_DT = to_date + timedelta(days=1) + else: + to_DT = datetime.now().date() + timedelta(days=1) + + chart_data_Dict = OrderedDict({ + str(from_DT + timedelta(n)): { + 'sales_count': 0, + 'sales_sum': 0, + 'paid_sum': 0, + 'paid_cash': 0, + 'paid_invoice': 0 + } for n in range(int((to_DT - from_DT).days) + 1) + }) + + total_pays_cash = 0 + total_pays_invoice = 0 + + objs_i = 0 + while objs_i < len(objs): + obj = objs[objs_i] + # if obj['id'] == 31: + # print('!') + + orders_items = [] + receipts_items = [] + pays_items = [] + obj['sales_count'] = 0 + obj['sales_sum'] = 0 + obj['paid_sum'] = 0 + obj['paid_cash'] = 0 + i = 0 + while i < len(orders): + if obj['id'] == orders[i]['manager_obj__id']: + if orders[i]['sales_count']: + obj['sales_count'] += orders[i]['sales_count'] + if orders[i]['sales_sum']: + obj['sales_sum'] += orders[i]['sales_sum'] + chart_data_Dict[str(orders[i]['delivery_DT'].date())]['sales_sum'] += round( + orders[i]['sales_sum'], 2) + if orders[i]['pay_type'] == 'cash' and orders[i]['paid_sum']: + obj['paid_cash'] += orders[i]['paid_sum'] + chart_data_Dict[str(orders[i]['delivery_DT'].date())]['paid_cash'] += round( + orders[i]['paid_sum'], 2) + chart_data_Dict[str(orders[i]['delivery_DT'].date())]['paid_sum'] += round( + orders[i]['paid_sum'], 2) + + orders_items.append(orders[i]) + + del orders[i] + + # break + else: + i += 1 + + i = 0 + while i < len(receipts): + if obj['id'] == receipts[i]['manager_obj__id']: + if receipts[i]['receipts_count']: + 
obj['sales_count'] -= receipts[i]['receipts_count'] + if receipts[i]['receipts_sum']: + obj['sales_sum'] -= receipts[i]['receipts_sum'] + chart_data_Dict[str(receipts[i]['receipt_D'])]['sales_sum'] -= round( + receipts[i]['receipts_sum'], 2) + receipts_items.append(receipts[i]) + + if receipts[i]['pay_type'] == 'cash': + if receipts[i]['receipts_sum']: + obj['paid_cash'] -= receipts[i]['receipts_sum'] + chart_data_Dict[str(receipts[i]['receipt_D'])]['paid_cash'] -= round( + receipts[i]['receipts_sum'], 2) + + chart_data_Dict[str(receipts[i]['receipt_D'])]['paid_sum'] -= round( + receipts[i]['receipts_sum'], 2) + + del receipts[i] + + # break + else: + i += 1 + + i = 0 + obj['paid_invoice'] = 0 + while i < len(pays): + + if obj['id'] == pays[i]['manager__id'] and pays[i]['sum_pay_byn']: + + # if pays[i]['id'] == 2879: + # print('!') + + obj['paid_invoice'] += pays[i]['sum_pay_byn'] + pays_items.append(pays[i]) + + chart_data_Dict[str(pays[i]['oper_D'])]['paid_invoice'] += round(pays[i]['sum_pay_byn'], 2) + chart_data_Dict[str(pays[i]['oper_D'])]['paid_sum'] += round(pays[i]['sum_pay_byn'], 2) + + del pays[i] + # elif obj['id'] == pays[i]['shipment_doc__rel_orders_for_documents__manager_obj__id']: + # obj['paid_sum'] += pays[i]['sum_pay_byn'] + # pays_items.append(pays[i]) + # del pays[i] + # break + else: + i += 1 + + if stat_kwargs: + if stat_kwargs and 'sales_count__gte' in stat_kwargs and stat_kwargs['sales_count__gte'] and \ + objs[objs_i][ + 'sales_count'] < float(stat_kwargs['sales_count__gte']): + del objs[objs_i] + elif stat_kwargs and 'sales_count__lte' in stat_kwargs and stat_kwargs['sales_count__lte'] and \ + objs[objs_i][ + 'sales_count'] > float(stat_kwargs['sales_count__lte']): + del objs[objs_i] + elif stat_kwargs and 'sales_sum__gte' in stat_kwargs and stat_kwargs['sales_sum__gte'] and objs[objs_i][ + 'sales_sum'] < float(stat_kwargs['sales_sum__gte']): + del objs[objs_i] + elif stat_kwargs and 'sales_sum__lte' in stat_kwargs and 
stat_kwargs['sales_sum__lte'] and objs[objs_i][ + 'sales_sum'] > float(stat_kwargs['sales_sum__lte']): + del objs[objs_i] + + else: + objs[objs_i]['sales_count'] = round(objs[objs_i]['sales_count'], 2) + objs[objs_i]['sales_sum'] = round(objs[objs_i]['sales_sum'], 2) + + objs_i += 1 + else: + objs_i += 1 + + obj['paid_sum'] = obj['paid_cash'] + obj['paid_invoice'] + + # obj['sales_count'] = round(obj['sales_count'], 2) + # obj['sales_sum'] = round(obj['sales_sum'], 2) + obj['paid_sum'] = round(obj['paid_sum'], 2) + + total_pays_cash += obj['paid_cash'] + total_pays_invoice += obj['paid_invoice'] + + # print('{0}. ord_count={1}, recepts_count={2}'.format(obj['full_name'], len(orders_items), len(receipts_items))) + + except Exception as e: + msg = 'Ошибка сбора статистики
{0}({1})'.format( + str(e), + str(e.args), + ) + print(msg) + title = 'ОШИБКА tE AuthApp get_sales_for_managers_by_orders' + techSendMail(msg, title) + + return objs, chart_data_Dict + + +class v1_Personal_ViewSet(APIViewSet_ModelClass): + queryset = User.objects.filter().order_by('last_name', 'first_name') + serializer_class = Personal_list_Serializer + permission_classes = (Personal_API_perms,) + + def get_serializer_class(self): + + try: + if self.action == 'list': + return Personal_list_Serializer + if self.action == 'add_communication_item': + from GeneralApp.api.v1.communications.communications_api_serializers import \ + Communications_create_Serializer + return Communications_create_Serializer + if self.action == 'retrieve': + return Personal_list_Serializer + if self.action == 'change_mailing_status': + return Personal_change_mailing_status_Serializer + + except (KeyError, AttributeError): + pass + + return super(v1_Personal_ViewSet, self).get_serializer_class() + + def get_queryset(self, *args, **kwargs): + + user = self.request.user + if not user or user.is_anonymous or not user.is_active: + return [] + + persons = super(v1_Personal_ViewSet, self).get_queryset() + persons = persons.exclude(is_superuser=True) + + if user.has_perm('AuthApp.UI_company_staff_modify_any') or user.has_perm( + 'AuthApp.UI_company_staff_retrieve_any_no_staff'): + return persons.filter(user_profile__enable=True) + elif user.has_perm('AuthApp.UI_company_staff_modify_if_manager'): + persons = persons.filter( + # user_profile__company_obj__manager_obj=user, + user_profile__enable=True + ) + return persons + elif user.has_perm('AuthApp.UI_company_staff_modify_if_staff_company'): + persons = persons.filter( + user_profile__company_obj=user.user_profile.company_obj, + user_profile__enable=True + ) + return persons + + return [] + + @action(methods=['GET'], detail=False) + def possible_departaments_list(self, request, *args, **kwargs): + + data_list = UserProfile.objects.exclude( + 
departament=None + ).exclude( + company_position='' + ).values_list('departament', flat=True).order_by('departament').distinct() + return Response(data_list) + + @action(methods=['PATCH'], detail=True) + def change_mailing_status(self, request, *args, **kwargs): + from MailingApp.models import Mailing + + obj = self.get_object() + + data = request.data + + res = obj.user_profile.change_mailing_status(data['mailing_ID'], data['mailing_status']) + + return Response(res) + + @action(methods=['GET'], detail=True) + def possible_mailings(self, request, *args, **kwargs): + from MailingApp.models import Mailing + + person = self.get_object() + + if not person or not person.user_profile or not person.user_profile.company_obj or \ + not person.user_profile.company_obj.manager_obj: + raise serializers.ValidationError( + u'user is not connected w company' + ) + + manager_of_person = person.user_profile.company_obj.manager_obj + + if manager_of_person != request.user: + from AuthApp.funcs import get_subordinate_staff + subordinate_users_for_cur_user = get_subordinate_staff(request.user) + if not manager_of_person in subordinate_users_for_cur_user: + raise serializers.ValidationError( + u'нет прав для доступа к управлению рассылкой' + ) + + mailings = request.user.user_profile.get_allow_mailings(for_user=person) + if mailings: + mailings = list( + mailings.values('id', 'name', 'enable', 'pass_weekends', 'last_send_DT', 'next_send_DT', 'creator__id', + 'creator_name')) + + connected_mailings_ids = person.user_profile.connected_mailings.all().values_list('id', flat=True) + + for mailing in mailings: + if mailing['id'] in connected_mailings_ids: + mailing['connected'] = True + else: + mailing['connected'] = False + + if mailing['next_send_DT']: + mailing['next_send_DT'] = str(mailing['next_send_DT']) + if mailing['last_send_DT']: + mailing['last_send_DT'] = str(mailing['last_send_DT']) + + return Response(mailings) + + @action(methods=['GET'], detail=False) + def 
possible_company_positions_list(self, request, *args, **kwargs): + + data_list = UserProfile.objects.exclude( + company_position=None + ).exclude( + company_position='' + ).values_list('company_position', flat=True).order_by('company_position').distinct() + return Response(data_list) + + @action(methods=['GET'], detail=True) + def get_connected_mailings(self, request, *args, **kwargs): + + obj = self.get_object() + data_list = obj.user_profile.get_connected_mailings().values('id', 'name', 'enable') + return Response(data_list) + + @action(methods=['POST'], detail=True) + def add_communication_item(self, request, *args, **kwargs): + + data = request.data + if not 'connection_name' in data or not 'connection_value' in data: + raise serializers.ValidationError( + u'Ошибка, недостаточно данных' + ) + + obj = self.get_object() + + from GeneralApp.views import create_communication_item + + ct = ContentType.objects.get_for_model(obj.user_profile) + res = create_communication_item(ct, obj.user_profile.id, data) + if 'error' in res: + raise serializers.ValidationError(res['error']) + + serializer = Personal_list_Serializer(obj) + + return Response(serializer.data) + + def list(self, request, *args, **kwargs): + + filter_kwargs, stat_kwargs = get_kwargs_by_get_query_params(request) + + res = check_and_get_specific_output_format(self, data=None, filename='personal_list.xlsx') + if res: + return res + + return super(v1_Personal_ViewSet, self).list(request, *args, **kwargs) + + @action(methods=['GET'], detail=True) + def list_by_company_id(self, request, *args, **kwargs): + if not kwargs or not 'pk' in kwargs: + raise serializers.ValidationError( + u'Ошибка, не указан id компании' + ) + + try: + company = Company.objects.get(id=kwargs['pk']) + except Company.DoesNotExist: + raise serializers.ValidationError( + u'Ошибка, неверный id компании' + ) + + personal_objs = self.get_queryset().filter(user_profile__company_obj=company) + serializer = 
Personal_list_Serializer(personal_objs, many=True) + + res = check_and_get_specific_output_format(self, serializer.data, '{0}_personal.xlsx'.format(str(company.name))) + if res: + return res + + data = serializer.data + + return Response(data) + + @action(methods=['GET'], detail=True) + def list_by_office_id(self, request, *args, **kwargs): + if not kwargs or not 'pk' in kwargs: + raise serializers.ValidationError( + u'Ошибка, не указан id подразделения' + ) + + try: + office = DeliveryData.objects.get(id=kwargs['pk']) + except DeliveryData.DoesNotExist: + raise serializers.ValidationError( + u'Ошибка, неверный id компании' + ) + + personal_objs = self.queryset.filter(user_profile__office=office) + serializer = Personal_list_Serializer(personal_objs, many=True) + + res = check_and_get_specific_output_format(self, data=serializer.data, + filename='{0}_personal.xlsx'.format(str(office.name))) + if res: + return res + + return Response(serializer.data) + + def partial_update(self, request, *args, **kwargs): + user = self.get_object() + + data = request.data + + if 'id' in data and not data['id']: + return Response({'error': 'Изменение невозможно. Нет идентификатора пользователя'}, + status=status.HTTP_400_BAD_REQUEST) + + if 'email' in data and not data['email']: + return Response({'error': 'Изменение невозможно. Нет email идентификатора пользователя'}, + status=status.HTTP_400_BAD_REQUEST) + + users = User.objects.filter( + id=user.id + ) + + if not users: + return Response({'error': 'Изменение невозможно. 
Пользователь не найден'}, + status=status.HTTP_400_BAD_REQUEST) + + company = None + profile_data = None + if 'user_profile' in data: + profile_data = data['user_profile'] + if 'company_obj' in profile_data and profile_data['company_obj']: + try: + company = Company.objects.get(id=profile_data['company_obj']) + profile_data['company_obj'] = company + + except Company.DoesNotExist: + return Response({'error': u'Компания не существует'}, + status=status.HTTP_400_BAD_REQUEST) + + office = None + if 'office' in profile_data and profile_data['office']: + try: + office = DeliveryData.objects.get(id=profile_data['office']) + profile_data['office'] = office + except Company.DoesNotExist: + return Response({'error': u'Подразделение не существует'}, + status=status.HTTP_400_BAD_REQUEST) + + if 'id' in profile_data: + del profile_data['id'] + if 'office__name' in profile_data: + del profile_data['office__name'] + + user_data = data + + if profile_data: + del user_data['user_profile'] + if 'id' in user_data: + del user_data['id'] + if 'full_name' in user_data: + del user_data['full_name'] + if 'email' in user_data: + user_data['username'] = user_data['email'] + + if users.count() > 1: + return Response({'error': u'Ошибка, найдено более одного пользователя, соответствующего id'}, + status=status.HTTP_400_BAD_REQUEST) + + users.update(**user_data) + user = users.first() + + if profile_data: + UserProfile.objects.filter(user=user).update(**profile_data) + + if 'mailing_sets' in profile_data: + if not company and user.user_profile: + company = user.user_profile.company_obj + if company: + company.update_mailing_on() + + serializer = Personal_list_Serializer(user) + data = serializer.data + + # data.update(get_editable_fields_list_Dict(request.user, self.action, serializer.Meta.fields)) + + return Response(data) + + def create(self, request, *args, **kwargs): + + data = request.data + + serializer = self.get_serializer_class()(data=data) + if not serializer.is_valid(): + raise 
serializers.ValidationError(serializer.errors) + + errors_Dict = {} + + if not 'email' in data or not data['email']: + errors_Dict.update({'email': 'обязательное поле'}) + # return Response( + # { + # 'errors': { + # 'Создание невозможно. Не не введен email' + # } + # }, + # status=status.HTTP_400_BAD_REQUEST + # ) + else: + + try: + user = User.objects.get( + email=data['email'] + ) + + errors_Dict.update({'email': 'Создание невозможно. Пользователь с указанным email уже существует'}) + + # return Response({'error': u'Создание невозможно. Пользователь с указанным email уже существует'}, + # status=status.HTTP_400_BAD_REQUEST) + + except User.DoesNotExist: + pass + + company = None + office = None + if 'office' in data['user_profile'] and data['user_profile']['office']: + try: + office = DeliveryData.objects.get(id=data['user_profile']['office']) + data['user_profile']['office'] = office + company = office.company + except Company.DoesNotExist: + return Response({'error': u'Подразделение не существует'}, + status=status.HTTP_400_BAD_REQUEST) + + if not company and ('company_obj' in data['user_profile'] and data['user_profile']['company_obj']): + try: + company = Company.objects.get(id=data['user_profile']['company_obj']) + data['user_profile']['company_obj'] = company + except Company.DoesNotExist: + return Response({'error': u'Компания не существует'}, + status=status.HTTP_400_BAD_REQUEST) + + if 'creator' in data['user_profile'] and data['user_profile']['creator']: + try: + creator = User.objects.get(id=data['user_profile']['creator']) + data['user_profile']['creator'] = company + except Company.DoesNotExist: + pass + # return Response({'error': u'Пользователь не существует'}, + # status=status.HTTP_400_BAD_REQUEST) + + if errors_Dict: + raise serializers.ValidationError(errors_Dict) + # return Response({ + # 'errors': errors_Dict + # }) + + from ....views import create_personal_user + res = create_personal_user(data, request.user) + if 'error' in res: + raise 
serializers.ValidationError(res['error']) + # return Response({'error': res['error']}, + # status=status.HTTP_400_BAD_REQUEST) + + serializer = Personal_list_Serializer(res['user']) + data = serializer.data + + # data.update(get_editable_fields_list_Dict(request.user, self.action, serializer.Meta.fields)) + + return Response(data) + + +class v1_Managers_ViewSet(APIViewSet_ModelClass): + queryset = User.objects.filter(is_staff=True).order_by('last_name', 'first_name') + serializer_class = User_list_Serializer + permission_classes = (Auth_API_perms,) + + def get_serializer_class(self): + + try: + if self.action == 'list': + return User_list_Serializer + if self.action == 'retrieve': + return User_list_Serializer + + except (KeyError, AttributeError): + pass + + return super(v1_Managers_ViewSet, self).get_serializer_class() + + def get_queryset(self, *args, **kwargs): + + user = self.request.user + if user.has_perm('AuthApp.UI_managers_all_stat'): + managers = User.objects.filter( + is_staff=True, + ) + else: + managers = User.objects.filter( + id=user.id, + is_staff=True, + ) + + managers = managers.order_by('-is_active', 'last_name', 'first_name') + + # if self.action in ('stat_list', ): + # + # filter_kwargs, stat_kwargs = get_kwargs_by_get_query_params(self.request) + # if kwargs: + # filter_kwargs.update(kwargs) + # + # managers = managers.filter( + # + # orders_for_manager__status_shipment='shipped', + # orders_for_manager__defective_goods=False, + # **filter_kwargs + # ).annotate( + # # sales_count=Sum('orders_for_manager__rel_products_in_order_for_order__count', + # # output_field=FloatField()), + # # sales_sum=Sum('orders_for_manager__rel_products_in_order_for_order__product_sum_byn', + # # output_field=FloatField()), + # # paid_sum=Sum('orders_for_manager__paid_sum_byn', + # # output_field=FloatField()), + # full_name=Concat(F('last_name'), V(' '), F('first_name')) + # ).filter( + # **stat_kwargs + # ).order_by( + # 'full_name' + # ).distinct() + # # return 
managers + # + # + # if self.action in ('get_sales_stat_by_productid', ): + # + # filter_kwargs, stat_kwargs = get_kwargs_by_get_query_params(self.request) + # if kwargs: + # filter_kwargs.update(kwargs) + # + # managers = managers.filter( + # is_staff=True, + # orders_for_manager__status_shipment='shipped', + # orders_for_manager__defective_goods=False, + # **filter_kwargs + # ).annotate( + # # sales_count=Sum('orders_for_manager__rel_products_in_order_for_order__count', + # # output_field=FloatField()), + # # sales_sum=Sum('orders_for_manager__rel_products_in_order_for_order__product_sum_byn', + # # output_field=FloatField()), + # # paid_sum=Sum('orders_for_manager__paid_sum_byn', + # # output_field=FloatField()), + # full_name=Concat(F('last_name'), V(' '), F('first_name')) + # ).filter( + # **stat_kwargs + # ).order_by( + # 'full_name' + # ).distinct() + # return managers + # else: + # if self.request.user: + # user = self.request.user + # if user.active and not self.request.user.is_anonymus: + # managers = User.objects.filter(id=user.id) + # return managers + + # return super(v1_Managers_ViewSet, self).get_queryset() + return managers + + def list(self, request, *args, **kwargs): + res = check_and_get_specific_output_format(self, data=None, filename='managers_list.xlsx') + if res: + return res + + res = super(v1_Managers_ViewSet, self).list(request, *args, **kwargs) + return res + + @action(methods=['GET'], detail=False) + def get_all_staff(self, request, *args, **kwargs): + + from ....funcs import get_all_staff + objs = get_all_staff() + serializer = self.get_serializer_class()(objs, many=True) + return Response(serializer.data) + + @action(methods=['GET'], detail=False) + def get_sales_managers_only(self, request, *args, **kwargs): + + from ....funcs import get_sales_managers + objs = get_sales_managers() + serializer = self.get_serializer_class()(objs, many=True) + return Response(serializer.data) + + @action(methods=['GET'], detail=False) + def 
get_sales_department_staff(self, request, *args, **kwargs): + + from ....funcs import get_sales_department_staff + objs = get_sales_department_staff() + serializer = self.get_serializer_class()(objs, many=True) + return Response(serializer.data) + + @action(methods=['GET'], detail=False) + def get_sales_department_staff(self, request, *args, **kwargs): + + from ....funcs import get_sales_department_staff + objs = get_sales_department_staff() + serializer = self.get_serializer_class()(objs, many=True) + return Response(serializer.data) + + @action(methods=['GET'], detail=False) + def get_subordinate_staff(self, request, *args, **kwargs): + objs = None + + if request.user.is_superuser: + objs = self.get_queryset() + else: + from ....funcs import get_subordinate_staff + objs = get_subordinate_staff(request.user, include_cur_user=True) + + if not objs: + objs = self.get_queryset().filter(id=request.user.id) + + serializer = self.get_serializer_class()(objs, many=True) + return Response(serializer.data) + + @action(methods=['GET'], detail=False) + def stat_list(self, request, *args, **kwargs): + + filter_kwargs, stat_kwargs = get_kwargs_by_get_query_params(request) + + period_from = None + period_to = None + region_ids = None + if 'delivery_DT__gte' in filter_kwargs: + period_from = filter_kwargs['delivery_DT__gte'] + if 'delivery_DT__lte' in filter_kwargs: + period_to = filter_kwargs['delivery_DT__lte'] + if 'company__region__in' in filter_kwargs: + region_ids = filter_kwargs['company__region__in'] + + from ....stat_funcs import managers_sales_by_period_n_regions + managers, chart_Data = managers_sales_by_period_n_regions( + period_from=period_from, period_to=period_to, + region_ids=region_ids + ) + + if request.query_params and 'chart' in request.query_params and request.query_params[ + 'chart'] == 'line': + return Response(chart_Data) + else: + return Response(managers) + + res = check_and_get_specific_output_format(self, data=managers, filename='managers_stat.xlsx') 
+ if res: + return res + + return Response(managers) + + @action(methods=['GET'], detail=True) + def get_sales_stat_by_productid(self, request, *args, **kwargs): + from django.db.models import Sum, FloatField, F, Value as V + + filter_kwargs, stat_kwargs = get_kwargs_by_get_query_params(request) + + ords_kwargs = {} + + ords_kwargs.update({ + 'enable': True, + 'status_shipment': 'shipped', + 'defective_goods': False, + 'rel_products_in_order_for_order__product__id': kwargs['pk'] + }) + + rec_kwargs = { + 'receipt_type': 'return', + 'enable': True, + } + + for key, val in filter_kwargs.items(): + if 'delivery_DT__gte' in key: + ords_kwargs.update({'delivery_DT__gte': val}) + rec_kwargs.update({'receipt_D__gte': val}) + + elif 'delivery_DT__lte' in key: + ords_kwargs.update({'delivery_DT__lte': val}) + rec_kwargs.update({'receipt_D__lte': val}) + elif 'company__region__in' in key: + ords_kwargs.update({key: val}) + rec_kwargs.update({'company__region__in': val}) + else: + ords_kwargs.update({key: val}) + + if kwargs and 'pk' in kwargs: + rec_kwargs.update({ + 'rel_products_in_receipt_for_receipt__product__id': kwargs['pk'] + }) + + # print(str(ords_kwargs)) + from B2BApp.models import Order, ReceiptDocument + ords_ids = Order.objects.filter( + **ords_kwargs + ).exclude( + pay_terms='no_pay' + ).values_list('id', flat=True) + + receipts_ids = ReceiptDocument.objects.filter( + **rec_kwargs + ).values_list('id', flat=True) + + managers = self.get_queryset().annotate( + full_name=Concat(F('last_name'), V(' '), F('first_name')) + # ).filter( + # **stat_kwargs + ).order_by( + 'full_name' + ).distinct( + ).values( + 'id', 'full_name', + ) + + ord_managers = managers.filter(orders_for_manager__in=ords_ids) + + recipt_managers = managers.filter(receipts_for_manager__in=receipts_ids) + + managers = ord_managers.union(recipt_managers) + + receipt_kwargs = { + 'enable': True, + } + ord_kwargs = { + 'order__enable': True, + 'order__pay_terms__in': pay_terms_for_report_list, + } + 
for key, val in filter_kwargs.items(): + if 'delivery_DT__gte' in key: + ord_kwargs.update({'order__delivery_DT__gte': val}) + receipt_kwargs.update({'receipt__receipt_D__gte': val}) + + elif 'delivery_DT__lte' in key: + ord_kwargs.update({'order__delivery_DT__lte': val}) + receipt_kwargs.update({'receipt__receipt_D__lte': val}) + + elif 'company__region__in' in key: + ord_kwargs.update({'order__company__region__in': val}) + receipt_kwargs.update({'receipt__company__region__in': val}) + + ord_kwargs.update({ + 'product__id': kwargs['pk'], + }) + + receipt_kwargs.update({ + 'product__id': kwargs['pk'], + # 'receipt__receipt_type': 'return' + }) + + if managers: + managers = get_sales_for_managers_by_goods(request.user, managers, ord_kwargs, receipt_kwargs) + + filename = 'managers_sales_by_product_{0}.xlsx'.format(str(kwargs['pk'])) + res = check_and_get_specific_output_format(self, data=managers, filename=filename) + if res: + return res + + return Response(managers) diff --git a/AuthApp/forms.py b/AuthApp/forms.py new file mode 100644 index 0000000..a956ff4 --- /dev/null +++ b/AuthApp/forms.py @@ -0,0 +1,70 @@ +# coding=utf-8 +from django import forms +from django.contrib.auth.forms import AuthenticationForm +from django.utils.translation import ugettext_lazy as _ +from django.core.exceptions import ValidationError +from .models import * +# from djng.styling.bootstrap3.forms import Bootstrap3ModelForm +# from djng.forms import fields, NgModelFormMixin, NgFormValidationMixin, NgModelForm +# from datetimepicker.widgets import DateTimePicker +# from datetimepicker.helpers import js_loader_url + + + +# class PersonForm(NgModelFormMixin, NgFormValidationMixin, NgModelForm, Bootstrap3ModelForm): +# +# form_name = 'person_form' +# scope_prefix = 'person_data' +# +# class Meta: +# model = UserProfile +# fields = ['name', 'departament', 'company', 'company_position', +# 'days_to_order_cancellation_default', 'days_to_pay_default', +# 'pay_terms', 'birthdate', +# 'phone', 
def emailValid(value):
    """Form validator: reject e-mails already used as an active account's username.

    Raises ValidationError when a matching active user exists.
    """
    # .exists() avoids fetching rows just to test membership
    # (the original relied on queryset truthiness).
    if User.objects.filter(username=value, is_active=True).exists():
        raise ValidationError(
            _(u'пользователь с таким e-mail уже существует, воспользуйтесь восстановлением пароля'))


def check_authorizationBy_cleaned_data(cleaned_data):
    """Authenticate using the form's cleaned username/password.

    Returns the authenticated User, or None on failure (authenticate's
    own contract). Stray debug print removed.
    """
    from django.contrib.auth import authenticate
    username = cleaned_data.get('username')
    password = cleaned_data.get('password')
    return authenticate(username=username, password=password)


def check_activate_by_user(reg_user):
    """Return True when reg_user is a real, activated account."""
    # Collapsed the original nested ifs + debug print into one expression.
    return bool(reg_user and reg_user.is_active)
sales_department_groups = [
    'Отдел продаж: Начальник отдела продаж',
    'Отдел продаж: Менеджер отдела продаж',
    'Отдел продаж: Региональный руководитель отдела продаж',
    'Отдел продаж: Стажер отдела продаж'
]

heads_of_sales_groups = [
    'Отдел продаж: Начальник отдела продаж',
    'Отдел продаж: Региональный руководитель отдела продаж',
]


def get_personal_companies_by_managers_list(managers_list, filter_kwargs=None, exclude_kwargs=None, only_emails=False):
    """Users of client companies managed by any manager in managers_list.

    Returns a queryset, or a list of e-mail strings when only_emails=True;
    an empty list on any lookup error (best-effort, as before).

    FIX: filter_kwargs/exclude_kwargs used mutable default arguments ({}),
    which are shared between calls in Python; replaced with None sentinels.
    """
    filter_kwargs = filter_kwargs or {}
    exclude_kwargs = exclude_kwargs or {}
    try:
        users = User.objects.filter(
            user_profile__company_obj__manager_obj__in=managers_list,
            **filter_kwargs
        ).exclude(
            **exclude_kwargs
        ).order_by('-is_active', 'last_name', 'first_name')

        if only_emails:
            users = list(users.values_list('email', flat=True))

    except Exception:
        # Deliberate best-effort: any lookup error yields an empty result.
        # (The original also built an unused `msg` string — removed.)
        users = []

    return users


def get_head_staffs_by_user_email(user_email, only_emails=False):
    """Resolve a user by e-mail and return that user's head staff, or None.

    NOTE(review): the only_emails parameter is ignored — the call below
    hard-codes only_emails=True, so callers always receive e-mail strings.
    Preserved as-is to avoid changing existing callers; confirm intent.
    """
    try:
        user = User.objects.get(email=user_email)
        heads = get_head_staffs(user.user_profile, only_emails=True)
    except User.DoesNotExist:
        heads = None
    return heads
def get_bosses_and_subordinate_staff_ids(user, only_active=False, include_cur_user=False, only_emails=False):
    """Ids (or e-mails) of `user`'s bosses plus the staff subordinate to `user`.

    Bosses are derived from the sales-department group hierarchy; for a
    superuser only the subordinate set is returned.
    """
    kwargs = {
        'is_staff': True,
    }
    if only_active:
        kwargs.update({'is_active': True})

    groups_name_list = user.groups.all().values_list('name', flat=True)

    Q_obj = Q()

    if 'Отдел продаж: Начальник отдела продаж' in groups_name_list:
        Q_obj.add(Q(is_superuser=True), Q.OR)

    elif 'Отдел продаж: Региональный руководитель отдела продаж' in groups_name_list:
        Q_obj.add(Q(is_superuser=True), Q.OR)
        Q_obj.add(Q(groups__name='Отдел продаж: Начальник отдела продаж'), Q.OR)

    elif 'Отдел продаж: Менеджер отдела продаж' in groups_name_list:
        Q_obj.add(Q(is_superuser=True), Q.OR)
        Q_obj.add(Q(groups__name='Отдел продаж: Начальник отдела продаж'), Q.OR)
        Q_obj.add(Q(
            groups__name='Отдел продаж: Региональный руководитель отдела продаж',
            user_profile__regions__in=user.user_profile.regions.all()), Q.OR)

    # FIX: the original forwarded only_emails here, making
    # get_subordinate_staff return a plain list of e-mail strings, which
    # then crashed on the .values_list() call below.
    subordinate_users = get_subordinate_staff(
        user, only_active=only_active, include_cur_user=include_cur_user
    )

    # FIX: UserProfile defines no `email` field in this code base — the
    # address lives on auth.User, so select 'email', not 'user_profile__email'.
    res_val = 'email' if only_emails else 'id'

    # get_subordinate_staff may return a plain empty list; guard before
    # using queryset-only API on it.
    if subordinate_users:
        subordinate_users = subordinate_users.values_list(res_val, flat=True)

    if user.is_superuser:
        users = subordinate_users
    else:
        bosses = User.objects.filter(Q_obj, **kwargs).values_list(res_val, flat=True)
        users = set(subordinate_users) | set(bosses)

    return users
def get_subordinate_staff(user, only_active=False, include_cur_user=False, only_emails=False):
    """Staff subordinate to `user`, resolved through group membership.

    Falls back to all staff for a superuser, then to the user themselves
    when include_cur_user is set. Returns a queryset, or a list of e-mail
    strings when only_emails=True.
    """
    subordinates = []

    member_groups = user.groups.all().values_list('name', flat=True)

    # Sales department hierarchy first.
    if member_groups:
        subordinates = get_subordinate_sales_staff(
            user, member_groups, include_cur_user=include_cur_user)

    if not subordinates and user.is_superuser:
        subordinates = get_all_staff()

    if not subordinates and include_cur_user:
        subordinates = User.objects.filter(id=user.id)

    if subordinates and only_active:
        subordinates = subordinates.filter(is_active=True)

    if only_emails:
        subordinates = list(subordinates.values_list('email', flat=True))

    return subordinates


def get_all_staff(only_active=False, only_emails=False):
    """Every staff user, optionally restricted to active accounts."""
    criteria = {'is_staff': True}
    if only_active:
        criteria['is_active'] = True

    staff = User.objects.filter(**criteria).order_by('-is_active', 'last_name', 'first_name')

    if only_emails:
        return list(staff.values_list('email', flat=True))
    return staff


def get_managers_wo_work():
    """Sales managers idle today: last web request today but >20 minutes ago."""
    idle_threshold = datetime.now() - timedelta(minutes=20)
    return get_sales_managers().filter(
        user_profile__last_web_request__lt=idle_threshold,
        user_profile__last_web_request__contains=date.today(),
    )
def get_sales_managers(only_active=False, only_emails=False):
    """Staff users who manage at least one client company."""
    from CompaniesApp.models import Company

    criteria = {
        'is_staff': True,
        'companies_for_manager__company_type': 'client',
    }
    if only_active:
        criteria['is_active'] = True

    managers = User.objects.filter(
        **criteria
    ).exclude(
        companies_for_manager=None
    ).distinct().order_by('-is_active', 'last_name', 'first_name')

    if only_emails:
        return list(managers.values_list('email', flat=True))
    return managers


def get_full_names_by_id(ids):
    """Map user id -> 'last_name first_name' for the given ids."""
    rows = User.objects.filter(
        id__in=ids
    ).annotate(
        fullname=Concat(F('last_name'), V(' '), F('first_name'))
    ).values('id', 'fullname')
    return {row['id']: row['fullname'] for row in rows}


def fullname_for_user(user):
    """'last_name first_name', stripped when either part is empty."""
    return f'{user.last_name} {user.first_name}'.strip()
def get_sales_department_staff(regions=None, only_active=False, only_emails=False):
    """Sales-department staff (superusers excluded).

    When `regions` is falsy the department head group is included as well.

    NOTE(review): `regions` only toggles whether the head group is included;
    it is never used to filter by region — confirm intent.

    FIX: the original shadowed the module-level `sales_department_groups`
    constant with a local of the same name and duplicated two nearly
    identical group lists; deduplicated, behavior preserved.
    """
    groups = [
        'Отдел продаж: Менеджер отдела продаж',
        'Отдел продаж: Региональный руководитель отдела продаж',
        'Отдел продаж: Стажер отдела продаж',
    ]
    if not regions:
        groups.append('Отдел продаж: Начальник отдела продаж')

    kwargs = {
        'is_staff': True,
        'groups__name__in': groups,
    }
    if only_active:
        kwargs.update({'is_active': True})

    users = User.objects.filter(**kwargs).exclude(is_superuser=True).order_by('-is_active', 'last_name', 'first_name')
    if only_emails:
        users = list(users.values_list('email', flat=True))
    return users


def get_marketing_departament_persons(only_active=False, only_emails=False):
    """Staff users in the marketing group ('Маркетинг: Маркетолог')."""
    kwargs = {
        'is_staff': True,
        'groups__name': 'Маркетинг: Маркетолог',
    }
    if only_active:
        kwargs.update({'is_active': True})

    users = User.objects.filter(**kwargs).order_by('-is_active', 'last_name', 'first_name')

    if only_emails:
        users = list(users.values_list('email', flat=True))
    return users
True + + dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='UserProfile', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.TextField(blank=True, help_text='Название', null=True, verbose_name='Название')), + ('name_plural', models.TextField(blank=True, null=True, verbose_name='Название (множественное число)')), + ('order', models.IntegerField(blank=True, null=True, verbose_name='Очередность отображения')), + ('createDT', models.DateTimeField(auto_now_add=True, verbose_name='Дата и время создания')), + ('modifiedDT', models.DateTimeField(blank=True, null=True, verbose_name='Дата и время последнего изменения')), + ('enable', models.BooleanField(db_index=True, default=True, verbose_name='Включено')), + ('json_data', models.JSONField(blank=True, default=dict, verbose_name='Дополнительные данные')), + ('UI_lang', models.CharField(choices=[('ru', 'Russian'), ('en', 'English')], max_length=2, verbose_name='Язык интерфейса')), + ('nick_name', models.CharField(blank=True, max_length=250, null=True, verbose_name='Псевдоним')), + ('authCode', models.CharField(blank=True, max_length=32, null=True)), + ('phone', models.CharField(blank=True, max_length=100, null=True, verbose_name='Телефон')), + ('birthdate', models.DateField(blank=True, null=True, verbose_name='Дата рождения')), + ('comment', models.TextField(blank=True, null=True, verbose_name='Дополнительные сведения')), + ('referal_link', models.TextField(verbose_name='Реферальная ссылка')), + ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_profile', to=settings.AUTH_USER_MODEL, verbose_name='id пользователя')), + ], + options={ + 'verbose_name': 'Профиль', + 'verbose_name_plural': 'Профили', + 'ordering': ('user__last_name', 'user__first_name'), + 'permissions': (), + }, + ), + ] diff --git 
class Migration(migrations.Migration):
    """Adds gamification fields (balance, question/answer counters) to
    UserProfile and relabels the `comment` and `user` fields.

    Auto-generated by Django 4.2.1; field definitions intentionally left
    exactly as generated.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('AuthApp', '0001_initial'),
    ]

    operations = [
        # New counters / balance for the Q&A feature.
        migrations.AddField(
            model_name='userprofile',
            name='answer_success_count',
            field=models.IntegerField(default=0, verbose_name='Успешных ответов'),
        ),
        migrations.AddField(
            model_name='userprofile',
            name='balance',
            field=models.FloatField(default=0, verbose_name='Баланс'),
        ),
        migrations.AddField(
            model_name='userprofile',
            name='questions_count',
            field=models.IntegerField(default=0, verbose_name='Задано вопросов'),
        ),
        # verbose_name changes only.
        migrations.AlterField(
            model_name='userprofile',
            name='comment',
            field=models.TextField(blank=True, null=True, verbose_name='Комментарий'),
        ),
        migrations.AlterField(
            model_name='userprofile',
            name='user',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='user_profile', to=settings.AUTH_USER_MODEL, verbose_name='Пользователь'),
        ),
    ]
def user_name_str(self):
    """'last_name first_name' — patched onto auth.User as __str__."""
    return f'{self.last_name} {self.first_name}'


User.add_to_class("__str__", user_name_str)


class UserProfile(BaseModel):
    """One-to-one extension of auth.User with profile data for this app."""

    # Nullable so a profile row can exist briefly without its user.
    user = models.OneToOneField(User, verbose_name=_('Пользователь'), related_name=u'user_profile',
                                null=True, blank=True, on_delete=models.CASCADE)

    UI_lang = models.CharField(max_length=2, verbose_name=_('Язык интерфейса'), choices=settings.LANGUAGES)

    nick_name = models.CharField(max_length=250, verbose_name=_('Псевдоним'), null=True, blank=True)

    # Short auth/activation code; length matches an md5 hex digest.
    authCode = models.CharField(max_length=32, null=True, blank=True)

    phone = models.CharField(max_length=100, verbose_name=_('Телефон'), null=True, blank=True)

    birthdate = models.DateField(verbose_name=_(u'Дата рождения'), null=True, blank=True)

    comment = models.TextField(verbose_name=_('Комментарий'), null=True, blank=True)

    referal_link = models.TextField(verbose_name=_('Реферальная ссылка'))
    balance = models.FloatField(verbose_name=_('Баланс'), default=0)
    questions_count = models.IntegerField(verbose_name=_('Задано вопросов'), default=0)
    answer_success_count = models.IntegerField(verbose_name=_('Успешных ответов'), default=0)

    def __str__(self):
        if self.user:
            return f'{self.user.last_name} {self.user.first_name}'
        return str(self.id)

    class Meta:
        permissions = ()
        verbose_name = _('Профиль')
        verbose_name_plural = _('Профили')
        ordering = ('user__last_name', 'user__first_name')


# post_save hook: every newly created User gets an empty profile.
def create_user_profile(sender, instance, created, **kwargs):
    if created:
        UserProfile.objects.create(user=instance)


post_save.connect(create_user_profile, sender=User, dispatch_uid='post_save_connect')
# @receiver(pre_save, sender=User)
def preSaveUser(sender, instance, **kwargs):
    """pre_save hook for User: default the e-mail from the username and
    touch the related profile's modification timestamp.

    NOTE(review): modifiedDT is set on the in-memory profile but the
    profile is not saved here — presumably persisted elsewhere; confirm.
    """
    if not instance.email:
        instance.email = str(instance.username).lower()

    try:
        instance.user_profile.modifiedDT = datetime.now()
    # FIX: was a bare `except: pass`, which also swallowed programming
    # errors. A missing reverse one-to-one raises RelatedObjectDoesNotExist,
    # a subclass of AttributeError, so this is the narrow catch we need.
    except AttributeError:
        pass


pre_save.connect(preSaveUser, sender=User, dispatch_uid='pre_save_connect')
---------------

'.format(str(log_begin_DT)) + print(msg) + + + stat_kwargs = {} + queryset_kwargs = {} + all_managers = False + separate_managers_sales_by_link_in_order = False + if kwargs: + if 'stat_kwargs' in kwargs: + stat_kwargs.update(kwargs['stat_kwargs']) + + + if 'all_managers' in kwargs: + all_managers = kwargs['all_managers'] + + + if 'queryset_kwargs' in kwargs: + queryset_kwargs.update(kwargs['queryset_kwargs']) + + + if 'separate_managers_sales_by_link_in_order' in kwargs: + separate_managers_sales_by_link_in_order = kwargs['separate_managers_sales_by_link_in_order'] + + + from B2BApp.stat_funcs import get_sales_by_period_n_regions + orders, receipts, pays = get_sales_by_period_n_regions(period_from=period_from, period_to=period_to, region_ids=region_ids, kwargs=kwargs) + + + ids = [] + if not all_managers: + + ord_manager_ids = set(item['manager_obj__id'] for item in orders) + receipt_manager_ids = set(item['manager_obj__id'] for item in receipts) + pay_manager_ids = set(item['manager__id'] for item in pays) + + ids = set.union(ord_manager_ids, receipt_manager_ids, pay_manager_ids) + + queryset_kwargs.update({'id__in': ids}) + + if not sum_w_prev_data: + managers = User.objects.filter( + **queryset_kwargs + ).annotate( + full_name=Concat(F('last_name'), V(' '), F('first_name')), + ).order_by('full_name').values( + 'id', 'full_name' + ) + + objs = list(managers) + objs.sort(key=itemgetter('full_name'), reverse=False) + if None in ids: + objs.append({'id': None, 'full_name': 'не назначен менеджер'}) + else: + objs = copy.deepcopy(sum_w_prev_data) + + from_date = None + to_date = None + if orders: + orders.sort(key=itemgetter('delivery_DT'), reverse=False) + from_date = orders[0]['delivery_DT'].date() + to_date = orders[-1]['delivery_DT'].date() + + if from_date: + from_DT = from_date + else: + from_DT = date(year=2000, month=1, day=1) + + if to_date: + to_DT = to_date + timedelta(days=1) + else: + to_DT = datetime.now().date() + timedelta(days=1) + + chart_data_Dict = 
OrderedDict({ + str(from_DT + timedelta(n)): { + 'sales_count': 0, + 'sales_sum': 0, + 'paid_sum': 0, + 'paid_cash': 0, + 'paid_invoice': 0 + } for n in range(int((to_DT - from_DT).days) + 1) + }) + + + objs_i = 0 + while objs_i < len(objs): + obj_i = objs[objs_i] + + # if separate_managers_sales_by_link_in_order: + + + if not 'sales_count' in obj_i: + obj_i['sales_count'] = 0 + if not 'sales_sum' in obj_i: + obj_i['sales_sum'] = 0 + if not 'paid_sum' in obj_i: + obj_i['paid_sum'] = 0 + if not 'receipts_sum' in obj_i: + obj_i['receipts_sum'] = 0 + if not 'receipts_count' in obj_i: + obj_i['receipts_count'] = 0 + # # if not 'top_sales_80' in obj_i: + # # обнуляем все при каждой итерации + # obj_i['top_sales_80'] = '' + # obj_i['manager_top_sales_80'] = '' + + # if obj_i['id'] == 657: + # print('!') + + obj_orders = list(filter(lambda item: item['manager_obj__id'] == obj_i['id'], orders)) + cash_orders = list(filter(lambda item: item['pay_type'] == 'cash', obj_orders)) + + obj_pays = list(filter(lambda item: item['manager__id'] == obj_i['id'], pays)) + + obj_receipts = list(filter(lambda item: item['manager_obj__id'] == obj_i['id'], receipts)) + cash_receipts = list(filter(lambda item: item['pay_type'] == 'cash' and item['receipts_sum'], obj_receipts)) + + if obj_orders: + obj_i['sales_sum'] += round(sum(item['sales_sum'] for item in obj_orders if item['sales_sum']), 2) + obj_i['sales_count'] += round(sum(item['sales_count'] for item in obj_orders if item['sales_count']), 2) + if cash_orders: + # добавляем в оплаты кэшевые операции потому как они отсутствуют в платежах + obj_i['paid_sum'] += round(sum(item['paid_sum'] for item in cash_orders if item['paid_sum']), 2) + + # if obj_i['id'] == 1199: + # print('!') + + if obj_receipts: + obj_i['receipts_sum'] += round( + sum(item['receipts_sum'] for item in obj_receipts if item['receipts_sum']), 2) + obj_i['receipts_count'] += round( + sum(item['receipts_count'] for item in obj_receipts if item['receipts_count']), 2) + + 
obj_i['sales_sum'] -= obj_i['receipts_sum'] + obj_i['sales_count'] -= obj_i['receipts_count'] + + if cash_receipts: + # минусуем из оплат кэшевые операции потому как возвраты + obj_i['paid_sum'] -= round(sum(item['receipts_sum'] for item in cash_receipts if item['receipts_sum']), 2) + + if obj_pays: + obj_i['paid_sum'] += round(sum(item['sum_pay_byn'] for item in obj_pays if item['sum_pay_byn']), 2) + + obj_i['indicative_sales'] = round(( obj_i['sales_sum'] + obj_i['paid_sum']) / 2, 2) + + required_del_client = False + if stat_kwargs: + if stat_kwargs and 'sales_count__gte' in stat_kwargs and stat_kwargs['sales_count__gte'] and \ + objs[objs_i][ + 'sales_count'] < float(stat_kwargs['sales_count__gte']): + required_del_client = True + elif stat_kwargs and 'sales_count__lte' in stat_kwargs and stat_kwargs['sales_count__lte'] and \ + objs[objs_i][ + 'sales_count'] > float(stat_kwargs['sales_count__lte']): + required_del_client = True + elif stat_kwargs and 'sales_sum__gte' in stat_kwargs and stat_kwargs['sales_sum__gte'] and objs[objs_i][ + 'sales_sum'] < float(stat_kwargs['sales_sum__gte']): + required_del_client = True + elif stat_kwargs and 'sales_sum__lte' in stat_kwargs and stat_kwargs['sales_sum__lte'] and objs[objs_i][ + 'sales_sum'] > float(stat_kwargs['sales_sum__lte']): + required_del_client = True + + # if 'months_count' in kwargs: + # obj_i['middle_sum_for_month'] = round( obj_i['indicative_sales'] / kwargs['months_count'], 2) + # # else: + # # print('!') + + if required_del_client: + del objs[objs_i] + else: + objs_i += 1 + + msg = 'managers_sales_by_period_n_regions finish - {0} (processing time = {1}
---------------

'.format( + str(datetime.now()), + str(datetime.now() - log_begin_DT) + ) + print(msg) + + return objs, chart_data_Dict \ No newline at end of file diff --git a/AuthApp/tests.py b/AuthApp/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/AuthApp/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/AuthApp/urls.py b/AuthApp/urls.py new file mode 100644 index 0000000..a268e24 --- /dev/null +++ b/AuthApp/urls.py @@ -0,0 +1,57 @@ +# coding=utf-8 +from django.conf.urls import url +# from AuthApp.js_views import * +# from AuthApp.import_funcs import * +from AuthApp.views import * +from django.contrib.auth import views + +urlpatterns = [ + + # ajax ---------------- + # url(r'^login$', user_login_View_ajax, name='user_login_View_ajax'), + # url(r'^login_confirm$', user_login_confirm_ajax, name='user_login_confirm_ajax'), + # + # url(r'^logout$', user_logout_ajax, name='user_logout_View_ajax'), + # url(r'^logout_confirm$', user_logout_confirm_ajax, name='user_logout_confirm_ajax'), + # + # url(r'^check_exists_email$', check_exists_email_ajax, name='check_exists_email_ajax'), + # + # url(r'^registration$', user_registration_View_ajax, name='user_registration_View_ajax'), + # url(r'^user_registration_send_confirmation_mail$', + # user_registration_send_confirmation_mail_ajax, name='user_registration_send_confirmation_mail_ajax'), + # + # url(r'^password_recovery$', password_recovery_View_ajax, name='password_recovery_View_ajax'), + # url(r'^password_reset$', password_reset_send_mail_ajax, name='password_reset_send_mail_ajax'), + # + # url(r'^registration_by_order_data_and_send_confirmation_mail$', + # registration_by_order_data_and_send_confirmation_mail_ajax, name='registration_by_order_data_and_send_confirmation_mail_ajax'), + # + # + # # ----------------------- + # + # url(r'^check_user_registration_and_activate/(?P[\d+]*)/(?P[0-9a-z\+\-\_]+)$', + # check_user_registration_and_activate, + # 
name='check_user_registration_and_activate'), + # + # # url(r'^user/password/reset/$', + # # 'django.contrib.auth.views.password_reset', + # # {'post_reset_redirect' : '/user/password/reset/done/', + # # 'password_reset_form': ResetForm + # # }, + # # name="password_reset"), + # # url(r'^user/password/reset/done/$', views.password_reset_done, name='password_reset_done'), + # url(r'^reset/(?P[0-9A-Za-z_\-]+)/(?P[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', + # views.password_reset_confirm, name='password_reset_confirm'), + # url(r'^reset/done/$', views.password_reset_complete, name='password_reset_complete'), + # + # + # # import + # url(r'^import_one_user/(?P[\d+]*)$', + # import_json_mipp_user_by_id, name='import_one_mipp'), + # url(r'^import_web_users$', + # import_json_mipp_webUsers, name='import_mipp_webUsers'), + # + # url(r'^import_invoices_for_user_by_user_id/(?P[\d+]*)$', + # import_invoices_for_user_by_user_id, name='import_invoices_for_user_by_user_id'), + +] \ No newline at end of file diff --git a/AuthApp/views.py b/AuthApp/views.py new file mode 100644 index 0000000..5fd6218 --- /dev/null +++ b/AuthApp/views.py @@ -0,0 +1,292 @@ +# coding=utf-8 + +from django.shortcuts import render + +from uuid import uuid1 +from AuthApp.models import * +from django.contrib import auth +from django.http import HttpResponse, Http404 +from django.template import loader, RequestContext +from django.contrib.auth.decorators import login_required +from BaseModels.mailSender import techSendMail +from django.utils.translation import ugettext as _ +from datetime import datetime + + +def create_personal_user(data, creator): + + try: + + user_id = str(uuid1().hex)[:10] + user_name = data['email'] + mail = user_name + user = User.objects.create_user(username=user_name, email=mail, password=user_id) + + if 'first_name' in data and data['first_name']: + user.first_name = data['first_name'] + if 'last_name' in data and data['last_name']: + user.last_name = data['last_name'] + 
def decode_get_param(data):
    """Decode a base64-encoded JSON payload carried in ``data['data']``.

    Args:
        data: mapping whose ``'data'`` key holds a base64-encoded JSON string.

    Returns:
        The deserialized JSON object (typically a dict of request params).
    """
    import base64
    import json

    raw = base64.b64decode(data['data'].encode())

    try:
        request_data = raw.decode('utf8')
    except UnicodeDecodeError:
        # Narrowed from a bare ``except``: only a failed decode should fall
        # through; json.loads() accepts bytes directly on Python 3.6+.
        request_data = raw

    return json.loads(request_data)
def recovery_password_user(request, uidb64=None, token=None):
    """Delegate password recovery to Django's ``PasswordResetConfirmView``.

    Args:
        request: the incoming HttpRequest.
        uidb64: base64-encoded user id from the reset link.
        token: one-time password-reset token from the reset link.

    Returns:
        The HttpResponse produced by the password-reset-confirm view.
    """
    from django.contrib.auth.views import PasswordResetConfirmView

    # BUG FIX: a class-based view must be dispatched through as_view();
    # instantiating the class with request kwargs returned a view *instance*
    # instead of an HttpResponse, which breaks the URL resolver.
    return PasswordResetConfirmView.as_view()(request, uidb64=uidb64, token=token)
def get_active_user(request):
    """Return the authenticated user for this request, or ``None`` for anonymous visitors."""
    user = request.user
    return None if user.is_anonymous else user
def send_SMS(phone, text, urgent=False, staff=False):
    """Send an SMS through the cp.websms.by GET API.

    Args:
        phone: recipient phone number; unless ``staff`` is set, the first
            7-12 digit run found in it is used as the recipient.
        text: message body (may contain non-ASCII characters).
        urgent: pass the API's ``urgent=1`` flag.
        staff: skip phone normalisation for staff-entered numbers.

    Returns:
        The decoded API status dict, or the string ``'phone DoesNotExist'``
        when no usable phone number could be extracted.
    """
    import re
    from urllib.parse import quote_plus
    from BaseModels.mailSender import techSendMail
    print('send_SMS')

    # BUG FIX: the previous code encoded the URL to bytes and then
    # concatenated the str '&message=' onto it -- a guaranteed TypeError on
    # Python 3.  Build the whole URL as str and let quote_plus() encode
    # spaces as '+' and percent-encode non-ASCII for the GET request.
    text = quote_plus(text)

    if not staff:
        phone = phone.replace(' ', '')
        phone_list = re.findall(r'\d{7,12}', phone)
        if not phone_list:
            return u'phone DoesNotExist'
        phone = phone_list[0]

    # SECURITY: credentials are hard-coded in source control; they should be
    # moved to settings / environment variables.
    http_request = 'http://cp.websms.by/?r=api/msg_send' \
                   '&user=administrator@baldenini.by' \
                   '&apikey=zTwevODOYl' \
                   '&sender=Baldenini'
    # '&test=1'

    if urgent:
        http_request += '&urgent=1'
    http_request += '&recipients=' + phone
    http_request += '&message=' + text

    http = urllib3.PoolManager()
    r = http.request('GET', http_request)
    r_status = json.loads(r.data)

    if r_status['status'] == 'error':
        message = r_status['message']
        try:
            # best-effort tech notification; never let it break the caller
            message = http_request + u'\n' + message
            techSendMail(message)
        except Exception:
            pass
    else:
        message = None

    stat = {
        'status': r_status,
        'message': message,
    }
    print('sms_status', phone, stat)

    return r_status
class PrettyJSONWidget(widgets.Textarea):
    """Textarea widget that pretty-prints its JSON value and auto-sizes itself."""

    def format_value(self, value):
        try:
            pretty = json.dumps(json.loads(value), indent=2, sort_keys=True, ensure_ascii=False)
            # grow the textarea to fit the formatted content, within sane bounds
            widths = [len(line) for line in pretty.split('\n')]
            self.attrs['rows'] = min(max(len(widths) + 2, 10), 30)
            self.attrs['cols'] = min(max(max(widths) + 2, 40), 120)
            return pretty
        except Exception as exc:
            # malformed / non-JSON value: fall back to the plain rendering
            print("Error while formatting JSON: {}".format(exc))
            return super(PrettyJSONWidget, self).format_value(value)
super(Admin_BaseIconModel, self).formfield_for_dbfield(db_field, **kwargs) + if db_field.name == 'url' or db_field.name == 'name' or db_field.name == 'title': + formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 500px'}) + if db_field.name == 'workListForServicePage': + formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 800px'}) + if db_field.name == 'seo_title': + formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 800px'}) + if db_field.name == 'seo_description' or db_field.name == 'seo_keywords': + formfield.widget = admin.widgets.AdminTextareaWidget(attrs={'style': 'width: 800px'}) + if db_field.name in ('lexems',): + formfield.widget = admin.widgets.AdminTextareaWidget(attrs={'style': 'width: 80%'}) + if db_field.name in ('type_full_name', 'properties_title_name', 'where_buy_title_name'): + formfield.widget = admin.widgets.AdminTextInputWidget(attrs={'style': 'width: 80%'}) + + return formfield + + formfield_overrides = { + models.ImageField: {'widget': AdminImageWidget}, + JSONField: {'widget': PrettyJSONWidget} + } + + def image_thumb(self, obj): + + try: + image_url = obj.avatar.url + except: + image_url = None + + if not image_url: + try: + image_url = obj.picture.url + except: + image_url = None + + if not image_url: + try: + image_url = obj.icon.url + except: + image_url = None + + if not image_url: + try: + image_url = obj.main_photo().url + except: + image_url = None + + if not image_url: + try: + image_url = obj.offer.main_photo().url + except: + image_url = None + + if not image_url: + try: + image_url = obj.rel_product.main_photo().url + except: + image_url = None + + if not image_url: + try: + image_url = obj.logo.url + except: + image_url = None + + if not image_url: + try: + image_url = obj.photo.url + except: + image_url = None + + if not image_url: + try: + image_url = obj.picture.url + except: + image_url = None + + if image_url: + s = str('') + return mark_safe(s) 
def xls_export(data, filename):
    """Return an HTTP response serving *data* as a downloadable XLSX attachment.

    Workbook assembly is delegated to the shared
    ``office_documents_utils.get_xls_file_by_data_list`` helper.
    """
    print('xls_export')

    from ..office_documents_utils import get_xls_file_by_data_list

    workbook_bytes = get_xls_file_by_data_list(data)

    response = HttpResponse(workbook_bytes, content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename="{0}"'.format(filename)
    return response
def fix_txt_for_use_in_interlinks(txt):
    """Replace interlink-breaking punctuation in *txt* with spaces.

    Each of ``/ ? ; , + :`` is replaced with a single space so the text can
    be embedded safely in internal link URLs.

    Args:
        txt: source string.

    Returns:
        The sanitized string.
    """
    # single C-level pass instead of six chained str.replace() calls
    return txt.translate(str.maketrans('/?;,+:', '      '))
class api_1C_perm(BasePermission):
    """
    Allows access only 1C users.
    """

    def has_permission(self, request, view):
        # 8751 looks like a dedicated warehouse-import service account that is
        # only allowed to perform warehouse imports -- TODO confirm and move
        # the magic id into settings.
        if request.user.id == 8751:
            # Narrowed from a bare ``except: return False``: the only failure
            # mode was a missing ``req_type`` attribute on the request.
            return getattr(request, 'req_type', None) == 'warehouse_import'

        perm = request.user.has_perm('AuthApp.1c_api')
        return perm
+ """ + def has_permission(self, request, view): + return request.user.has_perm('AuthApp.full_api') + diff --git a/BaseModels/api/base_api_serializers.py b/BaseModels/api/base_api_serializers.py new file mode 100644 index 0000000..2b4412d --- /dev/null +++ b/BaseModels/api/base_api_serializers.py @@ -0,0 +1,44 @@ +from rest_framework import serializers +from django.contrib.contenttypes.models import ContentType +from BaseModels.mailSender import techSendMail + + +class Import_Element_Srializer(serializers.Serializer): + element = serializers.JSONField() + class Meta: + fields = ( + 'element', + ) + + +class Import_Pocket_Srializer(serializers.Serializer): + timestamp = serializers.IntegerField() + warehouse = serializers.CharField() + data_list = Import_Element_Srializer(many=True) + + class Meta: + fields = ( + 'timestamp', 'warehouse', 'data_list' + ) + + + +class Generic_base_Serializer(serializers.ModelSerializer): + linked_object_type = serializers.CharField(required=False) + + def create(self, validated_data): + if 'linked_object_type' in validated_data: + try: + validated_data['content_type'] = ContentType.objects.get(model=validated_data['linked_object_type']) + del validated_data['linked_object_type'] + except Exception as e: + msg = 'Ошибка создания generic объекта
{0}({1})
{2}'.format( + str(e), + str(e.args), + str(validated_data) + ) + print(msg) + title = 'ОШИБКА tE Generic_base_Serializer create' + techSendMail(msg, title) + + return super(Generic_base_Serializer, self).create(validated_data) \ No newline at end of file diff --git a/BaseModels/api/base_api_views.py b/BaseModels/api/base_api_views.py new file mode 100644 index 0000000..c0463a6 --- /dev/null +++ b/BaseModels/api/base_api_views.py @@ -0,0 +1,356 @@ +# coding=utf-8 +from rest_framework import generics +from rest_framework.authentication import BasicAuthentication, SessionAuthentication +from rest_framework.permissions import IsAuthenticated, DjangoObjectPermissions +from rest_framework.views import APIView +from rest_framework import viewsets +from rest_framework.renderers import JSONRenderer +from rest_framework.permissions import AllowAny +from rest_framework.response import Response +from rest_framework.schemas import SchemaGenerator +from rest_framework_swagger import renderers +from BaseModels.api.base_api_permissions import * +from datetime import datetime +from GeneralApp.temp_data_funcs import add_element_in_tmp_data_list, check_exists_element_in_tmp_data_list, add_element_list_to_tmp_data +from rest_framework.utils.serializer_helpers import ReturnList +from rest_framework.decorators import action +from rest_framework import status +from django.contrib.contenttypes.models import ContentType + +from BaseModels.mailSender import techSendMail +# from BaseModels.api.api_middlewares import APILogMiddleware + + +class SwaggerSchemaView(APIView): + permission_classes = [AllowAny] + renderer_classes = [ + renderers.OpenAPIRenderer, + renderers.SwaggerUIRenderer + ] + + def get(self, request): + generator = SchemaGenerator() + schema = generator.get_schema(request=request) + + return Response(schema) + + + + +JSONCustomRenderer = JSONRenderer +JSONCustomRenderer.charset = 'utf-8' + + +class APIBasePublicClass(APIView): + # authentication_classes = 
(SessionAuthentication, BasicAuthentication) + permission_classes = (AllowAny,) + # renderer_classes = [JSONCustomRenderer] + pagination_class = None + + # def finalize_response(self, request, response, *args, **kwargs): + # + # res = super(APIBasePublicClass, self).finalize_response(request, response, *args, **kwargs) + # + # from CompaniesApp.models import Region + # regions = Region.objects.filter().values_list( + # 'id', 'domain' + # ).order_by('id') + # res.data.update({'regions': tuple(regions)}) + # + # return res + + + # def get(self, request, *args, **kwargs): + # + # if not 'region_id' in request.headers: + # request.headers['region_id'] = '1' + # + # return super(APIBasePublicClass, self).get(request, *args, **kwargs) + + +class APIListBaseClass(generics.ListAPIView): + # authentication_classes = (SessionAuthentication, BasicAuthentication, )# + permission_classes = (IsAuthenticated,) + pagination_class = None + +class APIBaseClass(generics.RetrieveAPIView): + # authentication_classes = (SessionAuthentication, BasicAuthentication, )# + permission_classes = (IsAuthenticated, ) + # renderer_classes = [JSONCustomRenderer] + pagination_class = None + + +class APIBaseSimplaClass(generics.GenericAPIView): + # authentication_classes = (SessionAuthentication, BasicAuthentication) + permission_classes = (IsAuthenticated,) + # renderer_classes = [JSONCustomRenderer] + pagination_class = None + +# ---------------------------- + +class APIViewSet_ModelReadOnlyClass(viewsets.ReadOnlyModelViewSet): + pass + # authentication_classes = (SessionAuthentication, BasicAuthentication, )# + permission_classes = (IsAuthenticated, ) + # renderer_classes = [JSONCustomRenderer] + pagination_class = None + + + +exclude_actions_for_logging = [] + +create_kwargs = [ + 'create', 'create_short', 'create_item', + 'copy_item', 'create_short', 'create_reminder' +] + +exclude_actions_for_logging.extend(create_kwargs) +exclude_actions_for_logging.extend([ + 'update', 'partial_update', 
def log_save_cur_state_obj(query_data, response=None, init=False):
    """Snapshot the state of the object(s) touched by a viewset action into
    the temp-data log.

    Args:
        query_data: the DRF ViewSet instance handling the request.
        response: optional Response; used to skip failed requests and as an
            id source for freshly created objects.
        init: True when called before the action runs (pre-change snapshot).

    Returns:
        'OK' when a snapshot was attempted, None when the action is skipped.
    """
    # alerts are too noisy to log; other actions are logged only if listed
    if query_data.basename == 'alert' or query_data.action not in exclude_actions_for_logging:
        return None

    # never log failed requests
    if response and response.status_code > 299:
        return None

    data_Dict = {}
    data_target = 'log_{0}'.format(str(query_data.basename))
    obj_id = None

    try:
        # Work out which object ids this action touched, trying the richest
        # sources first: bulk request payload, response body, request body,
        # URL kwargs, and finally the whole queryset.
        request_data = query_data.request.data
        if isinstance(request_data, list) and request_data and 'id' in request_data[0]:
            objs_list_ids = [obj['id'] for obj in request_data]
        elif response and response.data and isinstance(response.data, dict) and 'id' in response.data:
            objs_list_ids = [response.data['id']]
        elif response and response.data and getattr(response.data.serializer, 'instance', None):
            objs_list_ids = [response.data.serializer.instance.id]
        elif response and response.data and 'id' in response.data and response.data['id']:
            objs_list_ids = [response.data['id']]
        elif request_data and 'id' in request_data:
            objs_list_ids = [request_data['id']]
        elif 'pk' in query_data.kwargs:
            objs_list_ids = [query_data.kwargs['pk']]
        elif query_data.queryset:
            # BUG FIX: values_list('id') yields 1-tuples, which break the
            # id__in filter below; flat=True yields plain id values.
            objs_list_ids = query_data.queryset.values_list('id', flat=True)
        else:
            return None

        objs_list = query_data.queryset.filter(id__in=objs_list_ids)

        # Resolve the 'retrieve' serializer for a full object snapshot;
        # restore the real action even if resolution raises (previously an
        # exception here left the viewset stuck in 'retrieve').
        cur_action = query_data.action
        query_data.action = 'retrieve'
        try:
            serializer = query_data.get_serializer_class()
        finally:
            query_data.action = cur_action

        obj_data_list = serializer(objs_list, many=True)

        elements_list_for_add_to_tmp_data = []
        for obj_data in obj_data_list.data:
            obj_id = obj_data['id']

            # coerce JSON-unfriendly values (dates, Decimals, None, ...) to str
            for field in obj_data.keys():
                if type(obj_data[field]) not in (str, int, float, dict, list, bool):
                    obj_data[field] = str(obj_data[field])

            data_Dict = {
                'id': obj_id,
                'data': obj_data,
                'DT': str(datetime.now()),
                'user': str(query_data.request.user),
                'oper_type': query_data.action,
                'init': init
            }
            elements_list_for_add_to_tmp_data.append(data_Dict)

        add_element_list_to_tmp_data('log', data_target, init, elements_list_for_add_to_tmp_data)

    except Exception as e:
        # best effort: report logging failures by mail, never break the request
        response_data = ''
        if response and response.data:
            response_data = str(response.data)

        msg = 'log_save_cur_state_obj fail save to log w data = {0}\n{1}\n{2}\nresponse_data={3}'.format(
            str(e),
            'log - ' + str(data_target) + ' - ' + str(obj_id),
            str(data_Dict),
            response_data
        )
        techSendMail(msg)

    return 'OK'
{1}'.format( + # 'log - properties_log - ' + str(obj.id), + # str(data_Dict) + # ) + # techSendMail(msg) + + return obj + + + + def partial_update(self, request, *args, **kwargs): + if request.data: + request.data['modifiedDT'] = datetime.now() + obj = super(APIViewSet_ModelClass, self).partial_update(request, *args, **kwargs) + + # data_Dict = {} + # try: + # data_Dict = { + # 'data': prepare_data_for_json(vars(obj)), + # 'DT': str(datetime.now()), + # 'user': str(request.user), + # 'oper_type': 'create' + # } + # + # add_element_in_tmp_data_list('log', 'properties_log', obj.id, data_Dict) + # except Exception as e: + # msg = 'fail save to log w data = {0}
{1}'.format( + # 'log - properties_log - ' + str(obj.id), + # str(data_Dict) + # ) + # techSendMail(msg) + + return obj + + +class APIViewSet_ModelClass_w_Expenses(APIViewSet_ModelClass): + + @action(methods=['GET'], detail=True) + def expenses_rates(self, request, *args, **kwargs): + from ExpensesApp.api.v1.expenses_rate.expenses_rate_api_serializers import ExpensesRate_get_Serializer + model = self.serializer_class.Meta.model + + try: + obj = model.objects.get(id=kwargs['pk']) + + except model.DoesNotExist: + return Response({'error': u'ошибка получения expenses_rates'}, + status=status.HTTP_400_BAD_REQUEST) + + expenses_rates = obj.expenses_rates.all() + serializer = ExpensesRate_get_Serializer(expenses_rates, many=True) + + # if serializer.data: + # return Response(serializer.data) + + return Response(serializer.data) + + @action(methods=['GET'], detail=True) + def expenses_data(self, request, *args, **kwargs): + from ExpensesApp.api.v1.expenses_data.expenses_data_api_serializers import ExpensesData_get_Serializer + model = self.serializer_class.Meta.model + + try: + obj = model.objects.get(id=kwargs['pk']) + + except model.DoesNotExist: + return Response({'error': u'ошибка получения expenses_rates'}, + status=status.HTTP_400_BAD_REQUEST) + + expenses_data = obj.expenses_data.all() + serializer = ExpensesData_get_Serializer(expenses_data, many=True) + + # if serializer.data: + # return Response(serializer.data) + + return Response(serializer.data) + + +class APIViewSet_BaseClass(viewsets.ViewSet): + pass + # authentication_classes = (SessionAuthentication, BasicAuthentication,) # + permission_classes = (IsAuthenticated, ) + # renderer_classes = [JSONCustomRenderer] + pagination_class = None + + +# class APIBaseClass(generics.RetrieveAPIView): +# authentication_classes = (SessionAuthentication, BasicAuthentication, )# +# permission_classes = (IsAuthenticated,) +# # renderer_classes = [JSONCustomRenderer] +# pagination_class = None +# +# +# class 
# -*- coding: utf-8 -*-

__author__ = 'SDE'

from django.db import models
from datetime import datetime
from django.utils.translation import gettext_lazy as _
from django.db.models.signals import post_save, pre_save
from django.utils.text import slugify
from django.contrib.postgres.fields import JSONField
from ckeditor.fields import RichTextField


class BaseModel(models.Model):
    """Abstract base model: name/order/timestamps/enable flag plus a
    free-form ``json_data`` JSON payload."""
    name = models.TextField(verbose_name=_('Название'),
                            help_text=_('Название'), null=True, blank=True)
    name_plural = models.TextField(verbose_name=_('Название (множественное число)'),
                                   null=True, blank=True)
    order = models.IntegerField(verbose_name=_('Очередность отображения'), null=True, blank=True)
    createDT = models.DateTimeField(auto_now_add=True, verbose_name=_('Дата и время создания'))
    modifiedDT = models.DateTimeField(verbose_name=_('Дата и время последнего изменения'), null=True, blank=True)
    enable = models.BooleanField(verbose_name=_('Включено'), default=True, db_index=True)

    json_data = models.JSONField(verbose_name=_('Дополнительные данные'), default=dict, blank=True)

    def __str__(self):
        # NOTE(review): ``name`` is nullable, so this can return None while
        # Django expects a str -- confirm callers tolerate it.
        return self.name

    def get_node_by_name(self, node_name):
        """Return ``json_data[node_name]``, or None when absent."""
        if not self.json_data or node_name not in self.json_data:
            return None
        return self.json_data[node_name]

    def add_node_to_json_data(self, node_data, save=False):
        """Merge ``node_data`` into ``json_data`` (dict update / list append);
        optionally persist.  Returns the updated payload."""
        if not self.json_data:
            self.json_data = {}
        if isinstance(self.json_data, dict):
            self.json_data.update(node_data)
        elif isinstance(self.json_data, list):
            self.json_data.append(node_data)

        if save:
            self.save(update_fields=['json_data'])

        return self.json_data

    def save(self, *args, **kwargs):
        """Stamp ``modifiedDT`` on every save."""
        self.modifiedDT = datetime.now()
        # BUGFIX: when called with update_fields (e.g. from
        # add_node_to_json_data) the freshly stamped modifiedDT was silently
        # not persisted; include it explicitly.
        update_fields = kwargs.get('update_fields')
        if update_fields is not None and 'modifiedDT' not in update_fields:
            kwargs['update_fields'] = list(update_fields) + ['modifiedDT']
        super().save(*args, **kwargs)

    class Meta:
        abstract = True


def preSave_BaseModel(sender, instance, **kwargs):
    # NOTE(review): BaseModel declares no ``user_profile`` field, and Django
    # never dispatches signals for abstract senders -- this hook looks dead;
    # kept for compatibility.
    if instance and instance.user_profile:
        instance.modifiedDT = datetime.now()


pre_save.connect(preSave_BaseModel, sender=BaseModel, dispatch_uid='pre_save_connect')


class BaseModelViewPage(BaseModel):
    """Abstract page-like model: URL, rich-text content, imagery and SEO
    metadata on top of BaseModel."""
    url = models.TextField(verbose_name=_('URL привязанной страницы'), unique=True,
                           help_text=_(
                               'можно изменить адрес страницы (!!! ВНИМАНИЕ !!! поисковые системы потеряют страницу и найдут лишь спустя неделю...месяц)'))
    description = RichTextField(verbose_name=_('Краткое описание'), null=True, blank=True,
                                help_text=_('краткое описание страницы (до 240 символов)'))
    text = RichTextField(verbose_name=_('Полное описание'), null=True, blank=True, )
    picture = models.ImageField(upload_to='uploads/', verbose_name=_('Миниатюра'), null=True, blank=True,
                                help_text=u'')
    visible = models.BooleanField(verbose_name=_('Отображать'), default=True)
    background_image_left = models.ImageField(verbose_name=_('Левая подложка'), blank=True, null=True)
    background_image_right = models.ImageField(verbose_name=_('Правая подложка'), blank=True, null=True)

    seo_title = models.CharField(max_length=250, verbose_name=_('Title (80 знаков)'), null=True, blank=True)
    seo_description = models.CharField(max_length=250, verbose_name=_('Description (150 знаков)'), null=True,
                                       blank=True)
    seo_keywords = models.CharField(max_length=250, verbose_name=_('Keywords (200 знаков)'), null=True, blank=True)
    seo_text = RichTextField(verbose_name=_(u'Текст SEO статьи'), null=True, blank=True)

    class Meta:
        abstract = True

    def get_description_exists(self):
        """True when a short description is set."""
        return bool(self.description)

    def get_text_exists(self):
        """True when a full description is set."""
        return bool(self.text)


def preSaveBaseModelViewPage(sender, instance, **kwargs):
    """Auto-derive ``url`` from ``name`` when it is empty."""
    # BUGFIX: the original read/wrote ``sender.url`` -- ``sender`` is the
    # model CLASS, not the row being saved.
    if not instance.url:
        instance.url = slugify(instance.name)


pre_save.connect(preSaveBaseModelViewPage, sender=BaseModelViewPage, dispatch_uid='pre_save_connect')


# ---- BaseModels/colors/generate_colors.py ----
from colorsys import hls_to_rgb, rgb_to_hls, rgb_to_hsv, hsv_to_rgb
from random import uniform, randint

DEFAULT_LIGHTNESS = 0.5
DEFAULT_SATURATION = 1
DEFAULT_VARIANCE = 0.2


def get_next_HSV_color(cur_color, offset_hue=0, offset_value=0, offset_saturation=0):
    """Shift an 'rrggbb' hex color in HSV space by the given offsets and
    return the new hex string (no '#').

    NOTE(review): channels are fed to colorsys as 0..255 ints; rgb_to_hsv /
    hsv_to_rgb are scale-preserving so this round-trips, but hue overflow is
    not wrapped -- confirm intended.
    """
    red = int(cur_color[0:2], base=16)
    green = int(cur_color[2:4], base=16)
    blue = int(cur_color[4:6], base=16)

    hue, saturation, value = rgb_to_hsv(red, green, blue)
    new_hue = hue + offset_hue
    new_value = value + offset_value
    new_saturation = saturation - offset_saturation

    red, green, blue = hsv_to_rgb(new_hue, new_saturation, new_value)

    return f"{int(red):02x}{int(green):02x}{int(blue):02x}"


def get_next_color(cur_color, offset_hue=0, offset_lightness=0, offset_saturation=0):
    """Shift an 'rrggbb' hex color in HLS space, wrapping each component back
    into [0, 1], and return the new hex string.

    NOTE(review): colorsys expects 0..1 channels but 0..255 ints are passed,
    and only lightness is re-normalized (/255) afterwards -- behavior kept
    as transcribed; confirm intended.
    """
    red = int(cur_color[0:2], base=16)
    green = int(cur_color[2:4], base=16)
    blue = int(cur_color[4:6], base=16)

    hue, lightness, saturation = rgb_to_hls(red, green, blue)
    lightness = lightness / 255
    if saturation < 0.1:
        saturation = 1

    new_hue = hue + offset_hue
    new_lightness = lightness + offset_lightness
    new_saturation = saturation + offset_saturation

    # Wrap overflowing components back into range.
    if new_hue > 1: new_hue = offset_hue
    if new_hue < 0: new_hue = 1
    if new_lightness > 1: new_lightness = offset_lightness
    if new_lightness < 0: new_lightness = 1
    if new_saturation > 1: new_saturation = offset_saturation
    if new_saturation < 0: new_saturation = 1

    red, green, blue = map(
        lambda v: int(v * 255),
        hls_to_rgb(new_hue, new_lightness, new_saturation),
    )

    return f"{red:02x}{green:02x}{blue:02x}"
# Re-declared defaults (identical to the module-level values) so the class is
# self-contained.
DEFAULT_LIGHTNESS = 0.5
DEFAULT_SATURATION = 1
DEFAULT_VARIANCE = 0.2


class Huetify(object):
    """Generate pseudo-random RGB hex colors around fixed hue anchors.

    ``lightness``/``saturation`` are HLS components in [0, 1]; ``variance``
    is the width of the random hue window around each anchor.
    """
    lightness: float
    saturation: float
    variance: float
    half_variance: float

    def __init__(
            self,
            lightness=DEFAULT_LIGHTNESS,
            saturation=DEFAULT_SATURATION,
            variance=DEFAULT_VARIANCE,
    ) -> None:
        self.lightness = lightness
        self.saturation = saturation
        self.variance = variance
        self.half_variance = variance / 2.0

    def huetify_to_rgb_hex(self, hue) -> str:
        """Pick a random hue within ±variance/2 of ``hue`` and return it as a
        lowercase 'rrggbb' hex string (no '#')."""
        hue_variant = uniform(
            hue - self.half_variance,
            hue + self.half_variance,
        )
        red, green, blue = (
            int(v * 255) for v in hls_to_rgb(hue_variant, self.lightness, self.saturation)
        )
        return f"{red:02x}{green:02x}{blue:02x}"

    def huetify_next_variant_to_rgb_hex(self, cur_variant):
        """Deterministic variant: hue = cur_variant + variance/2.

        NOTE(review): despite the name this returns an (r, g, b) int tuple,
        not a hex string; callers depend on the tuple, so the interface is
        kept.
        """
        hue_variant = cur_variant + self.half_variance
        red, green, blue = (
            int(v * 255) for v in hls_to_rgb(hue_variant, self.lightness, self.saturation)
        )
        return red, green, blue

    @property
    def reddish(self):
        """Random red-family hex color."""
        return self.huetify_to_rgb_hex(0)

    @property
    def greenish(self):
        """Random green-family hex color."""
        return self.huetify_to_rgb_hex(0.333)

    @property
    def blueish(self):
        """Random blue-family hex color."""
        return self.huetify_to_rgb_hex(0.666)

    def blue_colors(self, cur_variant=None):
        """Deterministic stepped blue; defaults to the blue anchor."""
        if not cur_variant:
            cur_variant = 0.666 - self.half_variance
        return self.huetify_next_variant_to_rgb_hex(cur_variant=cur_variant)

    @property
    def yellowish(self):
        """Random yellow-family hex color."""
        return self.huetify_to_rgb_hex(0.166)

    @property
    def random_color(self):
        """One random hex color from the four anchor families.

        BUGFIX: the branch for ch == 3 returned greenish twice and blueish
        never; it now returns blueish.
        """
        ch = randint(1, 4)
        if ch == 1:
            return self.reddish
        elif ch == 2:
            return self.greenish
        elif ch == 3:
            return self.blueish
        else:
            return self.yellowish
def get_alfabank_nb_rate_by_currency_code(code, date=None):
    """Fetch the national-bank exchange rate for ``code`` from the Alfabank
    public API and cache it via ``create_or_update_tmp_data``.

    Parameters:
        code: ISO currency code ('USD', 'EUR', 'RUB', ...).
        date: optional datetime; when given, the rate for that date is
            requested.

    Returns the per-unit rate (float) or None on failure; errors are e-mailed
    via techSendMail instead of raised.
    """
    rate = None
    res = None
    req_str = None

    try:
        print('get_alfabank_nb_rate_by_currency_code')

        # Numeric ISO-4217 codes understood by the endpoint.
        int_code = {'USD': 840, 'EUR': 978, 'RUB': 643}.get(code)

        code_str = f'?currencyCode={int_code}' if int_code else ''

        date_str = ''
        if date:
            # BUGFIX: the original formatted datetime.now() here, ignoring
            # the ``date`` argument entirely.
            date_str = f'date={date.strftime("%d.%m.%Y")}'
            date_str = f'&{date_str}' if int_code else f'?{date_str}'

        req_str = f'https://developerhub.alfabank.by:8273/partner/1.0.1/public/nationalRates{code_str}{date_str}'

        try:
            print(f'GET {req_str}')
            res = requests.get(req_str)
            print(f'answer received = {str(res)}')
        except Exception as req_exc:
            print(f'Exception GET {req_str} = {str(req_exc)} ({str(res)})')
            res = None

        if res:
            data = json.loads(res.content)

            # The API returns the rate per ``quantity`` units of currency.
            for item in data['rates']:
                if item['iso'].upper() == code.upper():
                    rate = item['rate'] / item['quantity']
                    create_or_update_tmp_data('currency_rate', code, {
                        'rate': rate,
                        'DT': datetime.now().strftime('%d.%m.%Y %H:%M'),
                    })
                    break

    except Exception as e:
        msg = '!!!!! --- get_alfabank_nb_rate_by_currency_code error={0}\n{1}\n{2}'.format(
            str(e),
            str(res),
            str(req_str)
        )
        print(msg)
        techSendMail(msg, 'tE get_alfabank_nb_rate_by_currency_code error')

    print(f'get alfabank nb {code} rate = {str(rate)}')

    return rate


# ---- BaseModels/currency_exchange/funcs.py ----
import requests
import json
from datetime import datetime, timedelta
from BaseModels.mailSender import techSendMail
from GeneralApp.temp_data_funcs import *


def get_rate_nb_by_currency_code(code, date=None):
    """Return the NB exchange rate for ``code`` with a 30-minute cache.

    BYN is the base currency (always 1).  Cached values younger than 30
    minutes are served from tmp-data; after a failed fetch the API is hit at
    most once per 5 minutes (stale rate, or None, is served meanwhile).
    Returns None when no rate is available; errors are e-mailed, not raised.
    """
    from .nbrb.nbrb_currency_exchange import get_nbrb_rate_by_currency_code
    from .alfabank_api.alfabank_api_funcs import get_alfabank_nb_rate_by_currency_code

    if code == 'BYN':
        return 1

    rate = None
    request_required = True

    try:
        tmp_rec = get_tmp_data('currency_rate', code)
        if tmp_rec and tmp_rec.json_data:
            if 'rate' in tmp_rec.json_data:
                # Serve from cache while the stored rate is < 30 minutes old.
                if datetime.strptime(tmp_rec.json_data['DT'], '%d.%m.%Y %H:%M') + timedelta(
                        minutes=30) > datetime.now():
                    rate = tmp_rec.json_data['rate']

            if not rate:
                # Throttle: within 5 minutes of the last attempt serve the
                # stale rate (or None) instead of hitting the API again.
                if tmp_rec.modifiedDT + timedelta(minutes=5) > datetime.now():
                    if 'rate' in tmp_rec.json_data:
                        rate = tmp_rec.json_data['rate']
                    else:
                        request_required = False

        if request_required:
            if not rate:
                rate = get_alfabank_nb_rate_by_currency_code(code)

            # BUGFIX: tmp_rec is None on the very first call for a currency;
            # guard before stamping the retry timestamp.
            if tmp_rec:
                tmp_rec.modifiedDT = datetime.now()
                tmp_rec.save()

    except Exception as e:
        msg = f'!!!!! --- get_rate_nb_by_currency_code error={str(e)}'
        print(msg)
        techSendMail(msg, 'tE get_rate_nb_by_currency_code error')

    return rate
# ---- BaseModels/currency_exchange/nbrb/nbrb_currency_exchange.py ----
import requests
import json
from datetime import datetime, timedelta
from BaseModels.mailSender import techSendMail
from GeneralApp.temp_data_funcs import *


def get_nbrb_currency_id_by_currency_code(code):
    """Resolve an ISO abbreviation ('USD') to the NBRB internal Cur_Code, or
    None when unknown.  Network errors propagate to the caller."""
    data = requests.get('https://www.nbrb.by/api/exrates/currencies')
    json_data = json.loads(data.content)
    for item in json_data:
        if item.get('Cur_Abbreviation') == code:
            return item['Cur_Code']
    return None


def get_nbrb_rate_by_currency_code(code, date=None):
    """Fetch the NBRB official per-unit rate for ``code`` and cache it via
    ``create_or_update_tmp_data``.

    Returns the rate (float) or None; failures are e-mailed via techSendMail,
    never raised.
    """
    rate = None
    res = None
    req_str = None

    try:
        print('get_nbrb_rate_by_currency_code')

        if not date:
            req_str = 'https://www.nbrb.by/api/exrates/rates/{0}?parammode=2'.format(str(code))
        else:
            # BUGFIX: the original formatted datetime.now() here, ignoring
            # the ``date`` argument.
            date_str = date.strftime('%Y-%m-%d')
            # NOTE(review): this strips leading zeros ('-07' -> '-7'); the
            # NBRB API accepts both forms -- kept as transcribed.
            date_str = date_str.replace('-0', '-')
            req_str = 'https://www.nbrb.by/api/exrates/rates/{0}?parammode=2&ondate={1}'.format(
                str(code),
                date_str
            )

        last_exc = None
        try:
            print(f'GET {req_str}')
            res = requests.get(req_str, timeout=3)
            print(f'answer received = {str(res)}')
        except Exception as exc:
            # BUGFIX: the original pre-set ``e = None`` and then used
            # ``except Exception as e`` -- Python 3 unbinds ``e`` when the
            # except clause exits, so the later str(e) raised NameError.
            last_exc = exc
            print(f'Exception GET {req_str} = {str(exc)} ({str(res)})')
            res = None

        if not res:
            # Covers both a transport failure (res is None) and an HTTP error
            # status (requests.Response is falsy for 4xx/5xx).
            msg = '!!!!! --- get_nbrb_rate_by_currency_code requests GET error={0}\n{1}\n{2}\nrate set = {3}'.format(
                str(last_exc),
                str(res),
                str(req_str),
                str(rate)
            )
            print(msg)
            techSendMail(msg, 'tE get_nbrb_rate_by_currency_code error')
        else:
            # BUGFIX: the original parsed unconditionally, so a failed request
            # raised AttributeError on ``res.content`` and triggered a second
            # error e-mail from the outer handler.
            data = json.loads(res.content)

            if data and 'Cur_OfficialRate' in data and 'Cur_Scale' in data:
                rate = data['Cur_OfficialRate'] / data['Cur_Scale']
                create_or_update_tmp_data('currency_rate', code, {
                    'rate': rate,
                    'DT': datetime.now().strftime('%d.%m.%Y %H:%M'),
                })

    except Exception as e:
        msg = '!!!!! --- get_nbrb_rate_by_currency_code error={0}\n{1}\n{2}'.format(
            str(e),
            str(res),
            str(req_str)
        )
        print(msg)
        techSendMail(msg, 'tE get_nbrb_rate_by_currency_code error')

    if rate:
        print(f'get nbrb nb rate = {rate}')

    return rate


# ---- BaseModels/decorators.py ----
from django.http import HttpResponse, JsonResponse
import json

# Serialize a dict through Django's JsonResponse.
_make_result = lambda result: JsonResponse(result)


def jsonifydata():
    """Decorator factory: JSON-encode the wrapped view's return value."""
    def decorator(func):
        def wrapper(request, *args, **kwargs):
            result = func(request, *args, **kwargs)
            # BUGFIX: HttpResponse dropped the ``mimetype`` kwarg in
            # Django 1.7; ``content_type`` is the supported spelling.
            return HttpResponse(json.dumps(result), content_type='application/json')
        return wrapper
    return decorator


def jsonify(validation_form=None):
    """Decorator factory: optionally validate POST data with
    ``validation_form`` before calling the view, then JSON-encode the result.

    On validation failure the view is NOT called and
    ``{'result': False, 'errors': <form errors>}`` is returned instead.
    """
    def decorator(func):
        def wrapper(request, *args, **kwargs):
            if validation_form is not None:
                form = validation_form(data=request.POST, files=request.FILES)
                if form.is_valid():
                    request.form_data = form.cleaned_data
                else:
                    return _make_result({'result': False, 'errors': form.errors})
            return _make_result(func(request, *args, **kwargs))
        return wrapper
    return decorator


# ---- BaseModels/error_processing.py ----
from tEDataProj.settings import EXCEPTION_IMPORT_LOG_PATH, EXCEPTION_LOG_PATH
import codecs
from datetime import datetime


def open_log_file(message, filename=None, import_exc=False):
    """Open (append mode) the error log, write an opening stamp with
    ``message`` and return the file handle.

    ``import_exc`` selects the import-error log path/filename.  The caller
    must close the handle via ``close_log_file``.
    """
    if not filename:
        filename = u'import_errors.log' if import_exc else u'errors.log'

    path = EXCEPTION_IMPORT_LOG_PATH if import_exc else EXCEPTION_LOG_PATH

    f = codecs.open(path + filename, 'a', "utf-8")

    msg = u'{0} - {1}\n---------------------------\n\n'.format(
        str(datetime.now()),
        message
    )
    f.write(msg)

    return f
def close_log_file(f, message):
    """Write a closing stamp with ``message`` and close the log handle
    returned by open_log_file.  Returns True."""
    msg = u'---------------------------\n{0} - {1}\n\n'.format(
        str(datetime.now()),
        message
    )
    f.write(msg)
    f.close()
    return True


def save_log_string(f, exc_data):
    """Append one formatted error record (err_code / err_text / err_data)
    to the open log handle ``f``.  Returns True."""
    msg = u'- {0} - {1} ({2})\n{3}\n'.format(
        str(datetime.now()),
        exc_data['err_code'],
        exc_data['err_text'],
        exc_data['err_data'],
    )
    f.write(msg)
    return True


def generate_error(f, err_code, err_text, err_data):
    """Build an error dict, log it to ``f`` and return it."""
    exc_data = {
        'err_code': err_code,
        'err_text': err_text,
        'err_data': err_data
    }
    save_log_string(f, exc_data)
    return exc_data


# ---- BaseModels/functions.py ----
## -*- coding: utf-8 -*-
__author__ = 'SDE'

from django.utils.html import strip_tags
import json
import os.path
from PIL import Image
from django.core.files.uploadedfile import InMemoryUploadedFile
from BaseModels.mailSender import techSendMail
from datetime import datetime, timedelta


def get_near_work_day(DT):
    """Return DT unchanged on Mon-Fri, otherwise the following Monday."""
    if DT.isoweekday() < 6:
        return DT
    # Sat (6) -> +2 days, Sun (7) -> +1 day.
    return DT + timedelta(days=8 - DT.isoweekday())


def get_next_DT_for_monthes_delta_great(monthes_delta, fromDT=None):
    """Return the same day-of-month in the NEXT month, clamped down to the
    nearest day that exists in that month.

    NOTE(review): ``monthes_delta`` is accepted but never used -- the
    function always advances exactly one month; confirm against callers.

    BUGFIX: the default was ``fromDT=datetime.now()``, evaluated once at
    import time; it is now resolved per call.
    """
    if fromDT is None:
        fromDT = datetime.now()
    DT = fromDT

    # Step forward day by day until the month changes.
    cur_month = DT.month
    while cur_month == DT.month:
        DT = DT + timedelta(days=1)

    # Walk the target day back until it exists in the new month.
    i = 0
    while True:
        try:
            DT = DT.replace(day=fromDT.day - i)
            break
        except ValueError:
            i += 1

    return DT


def get_prev_DT_for_monthes_delta_less(monthes_delta, fromDT=None):
    """Return the same day-of-month ``monthes_delta`` months in the past,
    clamped down to a day that exists in that month.

    BUGFIX: per-call default instead of import-time ``datetime.now()``.
    """
    if fromDT is None:
        fromDT = datetime.now()
    DT = fromDT

    # Jump back one month at a time via "first of month minus one day".
    i = 0
    while i < monthes_delta:
        DT = DT.replace(day=1)
        DT = DT - timedelta(days=1)
        i += 1

    # Walk the target day back until it exists in the target month.
    i = 0
    while True:
        try:
            DT = DT.replace(day=fromDT.day - i)
            break
        except ValueError:
            i += 1

    return DT


def correct_filter_name_for_filter_and_create(filter_kwargs):
    """Split kwargs into (filter_kwargs, create_kwargs): filtering matches
    ``name`` case-insensitively and neither dict keeps ``id``... except that
    ``create`` keeps everything but ``id``.  Returns (filter_Dict, create_Dict)."""
    filter_Dict = dict(filter_kwargs)
    create_Dict = dict(filter_kwargs)

    if 'name' in filter_kwargs:
        filter_Dict['name__iexact'] = filter_kwargs['name']
        del filter_Dict['name']

    if 'id' in filter_kwargs:
        del filter_Dict['id']
        del create_Dict['id']

    return filter_Dict, create_Dict


def date_range_as_Dict(start_date, end_date):
    """Inclusive list of one-entry dicts ``{date: {}}`` from start to end."""
    import datetime
    return [{start_date + datetime.timedelta(n): {}} for n in range(int((end_date - start_date).days) + 1)]


def sortByLength(inputStr):
    """sort() key helper: order strings by length."""
    return len(inputStr)


def add_domain(request, url, add_lang=False):
    """Prefix ``url`` with the request's domain (and current language when
    ``add_lang``).

    NOTE(review): the two branches join differently ('{0}/{1}/{2}' vs
    '{0}{1}') -- callers appear to pass leading-slash urls in the second
    case; confirm.
    """
    domain = get_domain_by_request(request)
    if add_lang:
        cur_lang = get_cur_lang_by_request(request)
        return '{0}/{1}/{2}'.format(domain, cur_lang, url)
    return '{0}{1}'.format(domain, url)


def get_domain_by_request(request):
    """Domain override from ?domain=..., falling back to project_sets.domain."""
    from project_sets import domain
    if request.query_params and 'domain' in request.query_params:
        return request.query_params['domain']
    return domain


def get_cur_lang_by_request(request):
    """Language override from ?cur_lang=..., falling back to project_sets.lang."""
    from project_sets import lang
    if request.query_params and 'cur_lang' in request.query_params:
        return request.query_params['cur_lang']
    return lang


def get_img_type_by_request(request):
    """Requested thumbnail format (?img_type=...), defaulting to 'webp'."""
    if request.query_params and 'img_type' in request.query_params:
        return request.query_params['img_type']
    return 'webp'
def image_convert_to_png(photo_file, save_file_path=None):
    """Convert an uploaded image to PNG.

    Returns an InMemoryUploadedFile with the PNG bytes (optionally also
    saving to ``save_file_path``), or ``{'error': msg}`` on failure; errors
    are additionally e-mailed via techSendMail.
    """
    from io import BytesIO
    from PIL import Image as Img
    print('image_convert_to_png')

    try:
        fn_list = photo_file.name.split('.')
        if len(fn_list) > 1:
            fp = fn_list[0] + '.png'
        else:
            fp = photo_file.name + '.png'

        image = Img.open(photo_file)
        print('photo was uploaded')

        try:
            # BUGFIX: Image.convert returns a NEW image; the original
            # discarded the result, so no RGB conversion ever happened.
            image = image.convert("RGB")
            print('photo was converted to RGB')
        except Exception:
            print('!!! fail convert photo to RGB')

        if save_file_path:
            image.save(save_file_path, format="PNG")

        print('photo was saved')

        fileBytes = BytesIO()
        image.save(fileBytes, format="PNG")
        print('photo was preparing for streaming')

        memoryFile = InMemoryUploadedFile(fileBytes, None, fp, 'image/png', 1, None)
        return memoryFile

    except Exception as e:
        msg = 'image_convert_to_png error={0}'.format(str(e))
        print(msg)
        techSendMail(msg, 'image_convert_to_png error')
        return {'error': msg}


def image_convert_to_webP(photo_file, save_file_path=None):
    """Convert an uploaded image to WEBP; same contract as
    image_convert_to_png but without the error guard (exceptions propagate)."""
    from io import BytesIO
    from PIL import Image as Img

    fn_list = photo_file.name.split('.')
    if len(fn_list) > 1:
        webP_fp = fn_list[0] + '.webp'
    else:
        webP_fp = photo_file.name + '.webp'

    image = Img.open(photo_file)
    # BUGFIX: keep the converted image (convert() returns a new object).
    image = image.convert("RGB")

    if save_file_path:
        image.save(save_file_path, format="WEBP")

    fileBytes = BytesIO()
    image.save(fileBytes, format="WEBP")
    memoryFile = InMemoryUploadedFile(fileBytes, None, webP_fp, 'image/webp', 1, None)

    return memoryFile


def get_thumb_path(full_filepath, img_type):
    """Return (creating on demand) the 300x300 'icon-' thumbnail next to
    ``full_filepath``; ``img_type == 'webp'`` converts the thumb to WEBP.
    Returns None when the source file does not exist."""
    convert_to_webP = (img_type == 'webp')

    full_filepath = full_filepath.replace('\\', '/')
    if not os.path.exists(full_filepath):
        return None

    path_list = full_filepath.split('/')
    filename = path_list[-1]
    filepath = '/'.join(path_list[:-1])

    if convert_to_webP:
        fn_list = filename.split('.')
        if len(fn_list) > 1:
            filename = fn_list[0] + '.webp'
        else:
            filename = filename + '.webp'

    icon_path = '{0}/icon-{1}'.format(filepath, filename)

    if not os.path.exists(icon_path):
        size = (300, 300)
        img = Image.open(full_filepath)
        if convert_to_webP:
            # BUGFIX: keep the converted image.
            img = img.convert("RGB")
        img.thumbnail(size)
        if convert_to_webP:
            img.save(icon_path, 'WEBP')
        else:
            img.save(icon_path)

    return icon_path


def get_filename_from_path(filepath, wo_ext=False):
    """Last path component of ``filepath`` (handles both '/' and '\\'
    separators); ``wo_ext=True`` strips everything after the first dot."""
    filename = filepath.split('/')[-1]
    filename = filename.split('\\')[-1]

    if filename and wo_ext:
        filename = filename.split('.')[0]

    return filename


def get_free_filename(filename, filepath):
    """First non-existing (or unreadable) path of the form
    filepath+filename[-N].

    NOTE(review): the '-N' suffix lands AFTER the extension ('a.txt-1');
    kept as-is since stored names may rely on it.
    """
    from os import path, access, R_OK

    full_path = filepath + filename

    i = 0
    while path.exists(full_path) and path.isfile(full_path) and access(full_path, R_OK):
        i += 1
        full_path = filepath + filename + '-{0}'.format(str(i))

    return full_path


def url_translit(value):
    """Transliterate ``value`` into a lowercase URL slug containing only
    [0-9a-z-]; collapses dash runs and drops a trailing dash."""
    value = translit(value).lower()

    allow_symbols = '0123456789abcdefghijklmnopqrstuvwxyz-'
    i = 0
    while i < len(value):
        if value[i] not in allow_symbols:
            # NOTE: replaces every occurrence of the character, as before.
            value = value.replace(value[i], '-')
        i += 1

    while '--' in value:
        value = value.replace(u'--', u'-')

    # BUGFIX: guard against an empty result before indexing the last char.
    if value and value[-1] == '-':
        value = value[:-1]

    return value
allow_symbols: + value = value.replace(value[i], '-') + + i += 1 + + while '--' in value: + value = value.replace(u'--', u'-') + + if value[len(value) - 1] == '-': + value = value[:-1] + + return value + + +def translit(locallangstring): + conversion = { + u'\u0410': 'A', u'\u0430': 'a', + u'\u0411': 'B', u'\u0431': 'b', + u'\u0412': 'V', u'\u0432': 'v', + u'\u0413': 'G', u'\u0433': 'g', + u'\u0414': 'D', u'\u0434': 'd', + u'\u0415': 'E', u'\u0435': 'e', + u'\u0401': 'Yo', u'\u0451': 'yo', + u'\u0416': 'Zh', u'\u0436': 'zh', + u'\u0417': 'Z', u'\u0437': 'z', + u'\u0418': 'I', u'\u0438': 'i', + u'\u0419': 'Y', u'\u0439': 'y', + u'\u041a': 'K', u'\u043a': 'k', + u'\u041b': 'L', u'\u043b': 'l', + u'\u041c': 'M', u'\u043c': 'm', + u'\u041d': 'N', u'\u043d': 'n', + u'\u041e': 'O', u'\u043e': 'o', + u'\u041f': 'P', u'\u043f': 'p', + u'\u0420': 'R', u'\u0440': 'r', + u'\u0421': 'S', u'\u0441': 's', + u'\u0422': 'T', u'\u0442': 't', + u'\u0423': 'U', u'\u0443': 'u', + u'\u0424': 'F', u'\u0444': 'f', + u'\u0425': 'H', u'\u0445': 'h', + u'\u0426': 'Ts', u'\u0446': 'ts', + u'\u0427': 'Ch', u'\u0447': 'ch', + u'\u0428': 'Sh', u'\u0448': 'sh', + u'\u0429': 'Sch', u'\u0449': 'sch', + u'\u042a': '', u'\u044a': '', + u'\u042b': 'Y', u'\u044b': 'y', + u'\u042c': '', u'\u044c': '', + u'\u042d': 'E', u'\u044d': 'e', + u'\u042e': 'Yu', u'\u044e': 'yu', + u'\u042f': 'Ya', u'\u044f': 'ya', + u'№': 'no', + } + translitstring = [] + for c in locallangstring: + translitstring.append(conversion.setdefault(c, c)) + return ''.join(translitstring) + + +def slugify_text(str_text): + utf8_code = False + try: + str_text = str_text.encode('utf-8').decode('utf-8') + utf8_code = True + except: + pass + + if utf8_code == False: + try: + str_text = str_text.decode('utf-8') + except: + pass + + str_text = del_bad_symbols(str_text) + + str_text = str_text.replace(u'"', u'') + str_text = str_text.replace(u"'", u'') + str_text = str_text.replace(u".", u'') + str_text = str_text.replace(u",", u'') + 
str_text = str_text.replace(u" -", u'-') + str_text = str_text.replace(u"- ", u'-') + str_text = str_text.replace(u"„", u'') + str_text = str_text.replace(u"(", u'') + str_text = str_text.replace(u")", u'') + str_text = str_text.replace(u"{", u'') + str_text = str_text.replace(u"}", u'') + str_text = str_text.replace(u"<", u'') + str_text = str_text.replace(u">", u'') + + str = translit(str_text) + str = translit(str) + if len(str) < 2 or len(str) + 3 < len(str_text): + str = translit(str_text) + str = translit(str) + + str = str.replace(u"'", u'') + str = str.replace(u'"', u'') + + if len(str) < 2: + str = u'' + return str + + +def get_price_from_string_w_del_tails(string): + string = del_bad_symbols(string) + + while string.find(' ') > -1: + string = string.replace(' ', '') + string = string.replace(u'$', '') + string = string.replace(u'USD', '') + string = string.replace(u'Br', '') + string = string.replace(u'руб.', '') + string = string.replace(u',', '.') + + return string + + +def kill_pretexts(txt): + pretexts = [ + 'в', 'без', 'до', 'из', 'к', 'на', 'по', 'о', 'от', 'перед', 'при', 'через', 'с', 'у', 'за', 'над', + 'об', 'под', 'про', 'для' + ] + + words = txt.split(' ') + words = [item for item in words if not item in pretexts] + + return ' '.join(words) + + +def stay_only_text_and_numbers(txt): + bad_symbols = '"~`{}[]|!@#$%^&*()_+№;:?= ' + nums = '0123456789' + + for symbol in bad_symbols: + txt = txt.replace(symbol, ' ') + + symbols_for_check = ',.' 
+ i = 0 + while i < len(txt): + if txt[i] in ['.', ',']: + if i < 1 or not txt[i - 1] in nums or i == len(txt) - 1 or not txt[i + 1] in nums: + txt_list = list(txt) + txt_list[i] = ' ' + txt = ''.join(txt_list) + # if txt[i] in ['"']: + # if i < 1 or not txt[i - 1] in nums: + # txt_list = list(txt) + # txt_list[i] = ' ' + # txt = ''.join(txt_list) + + i += 1 + + txt = txt.strip() + while ' ' in txt: + txt = txt.replace(' ', ' ') + + return txt + + +def del_bad_symbols_and_enters_and_tags(string): + # from string import maketrans + + try: + + string = strip_tags(string) + string = string.replace('\r\n', '') + del_bad_symbols(string) + except: + pass + + return string + + +def del_bad_symbols(string): + # from string import maketrans + + try: + + # string = strip_tags(string) + # string = string.replace('\r\n','') + string = string.strip() + + while string.find(' ') > -1: + string = string.replace(' ', ' ') + # table = maketrans(' ', ' ') + # string = string.translate(table) + + while string.find(' ') > -1: + string = string.replace(' ', ' ') + except: + pass + + return string + + +# def get_offers_from_cookie(request): +# if 'oknaplast_right_offers' in request.COOKIES: +# order_list = json.loads(request.COOKIES['oknaplast_right_offers'], encoding='utf8') +# return WindowOfferModel.objects.filter(id__in=order_list) +# else: +# return [] + + +def del_nbsp(string): + mapping = [ + (""", u'"'), + ('&', u'&'), + ('<', u'<'), + ('>', u'>'), + (' ', u' '), + ('¡', u'¡'), + ('¢', u'¢'), + ('£', u'£'), + ('¤', u'¤'), + ('¥', u'¥'), + ('¦', u'¦'), + ('§', u'§'), + ('¨', u'¨'), + ('©', u'©'), + ('ª', u'ª'), + ('«', u'«'), + ('¬', u'¬'), + ('®', u'®'), + ('¯', u'¯'), + ('°', u'°'), + ('±', u'±'), + ('²', u'²'), + ('³', u'³'), + ('´', u'´'), + ('µ', u'µ'), + ('¶', u'¶'), + ('·', u'•'), + ('¸', u'¸'), + ('¹', u'¹'), + ('º', u'º'), + ('»', u'»'), + ('¼', u'¼'), + ('½', u'½'), + ('¾', u'¾'), + ('€', u'€'), + ('\n', ''), + ('\r', ''), + ('\t', ' '), + ('—', '-'), + ] + for pair in 
mapping: + string = string.replace(pair[0], pair[1]) + return string diff --git a/BaseModels/inter.py b/BaseModels/inter.py new file mode 100644 index 0000000..b72fe62 --- /dev/null +++ b/BaseModels/inter.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- + +from django.http import HttpResponse +import json +import csv +from .mailSender import techSendMail + +import re + +numbers = '0123456789.,' + + +def get_unique_url(model, name, url=None): + from .functions import url_translit + + if not url: + url = url_translit(name) + + try: + obj = model.objects.get(url=url) + except model.DoesNotExist: + return url + + urls = model.objects.all().values_list('url', flat=True) + + i = 1 + while url in urls: + url = f'{url}-{i}' + i += 1 + + return url + + +def dicts_join(dict1, dict2, inplace=False): + result = dict1 if inplace else dict1.copy() + result.update(dict2) + return result + + +def set_ru_locale(): + import locale + + try: + locale.setlocale(locale.LC_ALL, 'ru_RU.utf8') + except Exception as e: + msg = '!!!!! --- set_ru_locale exception error={0}
{1}
'.format( + str(e), + str(e.args) + ) + print(msg) + techSendMail(msg, 'set_ru_locale') + return False + + return True + + +def get_all_videos_from_html_content(html): + if not html: + return None + + res = re.findall('iframe.*src=\"(.+?)\"', html) + + return res + + +def get_all_photos_from_html_content(html): + res = re.findall('src=\"(.+?)\"', html) + return res + + +def get_choices_value_by_choices_id(choices, id): + for ch_id, ch_val in choices: + if ch_id == id: + return ch_val + + return None + + +def sortByLength(inputStr): + return len(inputStr) + + +def get_current_language(request): + return request.LANGUAGE_CODE + + +def cut_to_number_w_point(string): + import re + + if not string: + return string + + string = string.replace(',', '.') + # str_list = string.split(',') + # + # if len(str_list) > 1: + # string = str_list[0] + # else: + # str_list = string.split('.') + # if len(str_list) > 2: + # string = u'{0}.{1}'.format(str_list[0], str_list[1]) + + try: + # шаблон для обрезки до цифр + p = '[0-9]+.[0-9]+' + number = u''.join(re.findall(p, string)) + if number == u'': + p = '[0-9]+' + number = u''.join(re.findall(p, string)) + except: + number = None + return number + + +def cut_to_number(string): + import re + + if not string: + return string + + # шаблон для обрезки до цифр + p = '[\d]+' + number = ''.join(re.findall(p, string)) + return number + + +def range_dates(start, end): + """ Returns the date range """ + from datetime import timedelta + + list = [start + timedelta(days=days) for days in range(0, (end - start).days + 1)] + return list + + # assert start <= end + # current = start.year * 12 + start.month - 1 + # end = end.year * 12 + end.month - 1 + # list = [] + # while current <= end: + # yield date(current // 12, current % 12 + 1, 1) + # current += 1 + + +# разбираем csv строку, получаем Dict +def get_Dict_from_csv_data(csv_data): + data = {} + for item in csv_data.split(';'): + try: + if item: + data.update(dict([item.split(':')[0:2]])) + 
except ValueError: + continue + + return data + # return dict([item.split(':')[0:2] for item in csv_data.split(';') if item]) + + +def cut_url_toPageName(url): + pageName = url.split('/')[-1] # получаем урл страницы + return pageName + + +def jsonify(): + def decorator(func): + def wrapper(request, *args, **kwargs): + result = func(request, *args, **kwargs) + return HttpResponse(json.dumps(result), mimetype='application/json') + + return wrapper + + return decorator + + +def check_perms_for_view_order(request, order): + def decorator(func): + def wrapper(request, *args, **kwargs): + c_user = request.user + if order: + if c_user == order.user or c_user == order.forUser: + return True + else: + if c_user.has_perm('OrdersApp.can_see_orders_all_companys'): + return True + else: + if order.group == c_user.group and c_user.has_perm('OrdersApp.can_see_orders_self_company'): + return True + + return wrapper + + return decorator diff --git a/BaseModels/json_funcs.py b/BaseModels/json_funcs.py new file mode 100644 index 0000000..8e5f6dc --- /dev/null +++ b/BaseModels/json_funcs.py @@ -0,0 +1,31 @@ +import json + + +def del_from_txt_bad_json_symbols(txt): + + log = '' + error = True + while error and len(txt) > 0: + try: + json.loads(txt) + error = None + except json.JSONDecodeError as e: + msg = '- длина контента = {2} - {1} - удален символ {0}'.format( + txt[e.pos], + str(e), + str(len(txt)-1) + ) + log = '{0}
{1}'.format(log, msg) + print(msg) + txt = txt[:e.pos] + txt[e.pos+1:] + error = e + + # import re + # r_str = r'[{\[]([,:{}\[\]0-9.\-+A-zr-u \n\r\t]|".*:?")+[}\]]' + # pattern = re.compile(r_str) + # txt = re.sub(r_str, '',txt) + # res = pattern.search(txt) + # if res: + # txt = res.string + + return txt, log \ No newline at end of file diff --git a/BaseModels/log/log_funcs.py b/BaseModels/log/log_funcs.py new file mode 100644 index 0000000..5599a7a --- /dev/null +++ b/BaseModels/log/log_funcs.py @@ -0,0 +1,103 @@ +from datetime import datetime, date +from django.db.models.fields.files import ImageFieldFile +from collections import OrderedDict + + +def send_mail_alert_w_data(obj, data, user=None): + + # try: + # + # article = getattr(obj, 'article', None) + # if article and article in ('10751', '10752', '10753', '10754', '10801', '10802', '10803', '10804'): + # from BaseModels.mailSender import techSendMail + # msg = f'change product {article}
' \ + # f'{datetime.now()}
' \ + # f'{str(user)}
' \ + # f'obj = {str(obj.__dict__)}
' \ + # f'data = {str(data)}
' + # techSendMail(msg, 'tE checkpoint alert') + # + # except Exception as e: + # print(f'send_mail_alert_w_data ERROR = {str(e)}') + + return True + + + +def get_normalized_data(data): + + if type(data) == OrderedDict: + data = dict(data) + + if '_state' in data: + del data['_state'] + + if type(data) == dict: + for key, val in data.items(): + if type(data[key]) in (dict, list, OrderedDict): + data[key] = get_normalized_data(val) + if type(data[key]) in (datetime, date, ImageFieldFile): + data[key] = str(val) + + if type(data) == list: + i = 0 + while i < len(data): + # if type(item) == OrderedDict: + # item = dict(item) + item = data[i] + + if type(item) in (dict, list, OrderedDict): + data[i] = get_normalized_data(item) + + if type(item) == dict: + for key, val in item.items(): + if type(item[key]) in (datetime, date, ImageFieldFile): + item[key] = str(val) + + elif type(item) == list: + ei = 0 + while ei < len(item): + if type(item[ei]) in (datetime, date, ImageFieldFile): + item[ei] = str(item[ei]) + + ei += 1 + + i += 1 + + return data + + + +def prepare_data_for_json(data): + + data = get_normalized_data(data) + + # if type(data) == OrderedDict: + # data = dict(data) + # + # if '_state' in data: + # del data['_state'] + # + # if type(data) == dict: + # for key, val in data.items(): + # if type(data[key]) in (datetime, date, ImageFieldFile): + # data[key] = str(val) + # + # if type(data) == list: + # for item in data: + # if type(item) == OrderedDict: + # item = dict(item) + # + # if type(item) == dict: + # for key, val in item.items(): + # if type(data[key]) in (datetime, date, ImageFieldFile): + # item[key] = str(val) + # + # elif type(item) == list: + # for el in item: + # if type(el) in (datetime, date, ImageFieldFile): + # el = str(el) + + + + return data \ No newline at end of file diff --git a/BaseModels/logging_change_data.py b/BaseModels/logging_change_data.py new file mode 100644 index 0000000..8743540 --- /dev/null +++ 
b/BaseModels/logging_change_data.py @@ -0,0 +1,259 @@ +from .mailSender import techSendMail + + + +# def get_order_changes_list_for_template(order): +# +# if order.json_data and 'data_log' in order.json_data: +# +# i = 0 +# changes_log = [] +# +# if len(order.json_data['data_log']) > 0: +# first_log_record = order.json_data['data_log'][0] +# +# while i ' + str(new_d1)) + # for val, key in new_d1.items(): + # added.update({key: (None, val)}) + elif not new_d1 and old_d2: + # modified = (old_d2) + # removed = {} + # for val, key in old_d2.items(): + # removed.update({key: (None, val)}) + removed.append(str(old_d2) + ' > None') + else: + d1_keys = set(new_d1.keys()) + d2_keys = set(old_d2.keys()) + intersect_keys = d1_keys.intersection(d2_keys) + added = d1_keys - d2_keys + removed = d2_keys - d1_keys + modified = {} + for o in intersect_keys: + if new_d1[o] != old_d2[o]: + if type(new_d1[o]) == dict: + modified.update({ + o: dict_compare(old_d2[o], new_d1[o]) + }) + elif type(new_d1[o]) in [list, tuple]: + modified.update({ + o: list_compare(old_d2[o], new_d1[o]) + }) + else: + modified.update({ + o: (old_d2[o], new_d1[o]) + }) + # modified = {o : (new_d1[o], old_d2[o]) for o in intersect_keys if new_d1[o] != old_d2[o]} + same = set(o for o in intersect_keys if new_d1[o] == old_d2[o]) + + # if not added: + # added = [] + # if not removed: + # removed = [] + # if not modified: + # modified = [] + + + if added or removed or modified: + return { + 'added': added, + 'removed': removed, + 'modified': modified, + # 'added' : list(added), + # 'removed' : list(removed), + # 'modified' : list(modified), + # 'same' : same + } + + except Exception as e: + + msg = 'dict_compare Error = {0}
{1}
{2}
{3}'.format( + str(e), + str(e.args), + old_d2, + new_d1 + ) + print(msg) + techSendMail(msg) + + return {} diff --git a/BaseModels/mailSender.py b/BaseModels/mailSender.py new file mode 100644 index 0000000..49f9c67 --- /dev/null +++ b/BaseModels/mailSender.py @@ -0,0 +1,353 @@ +## -*- coding: utf-8 -*- + +__author__ = 'SDE' + +from django.core.mail import EmailMultiAlternatives +# from AuthApp.models import UserProfileModel +import smtplib +from tEDataProj.settings import prod_server +from email.mime.multipart import MIMEMultipart +from email.mime.text import MIMEText +from email.mime.application import MIMEApplication +from email.mime.multipart import MIMEMultipart +from os.path import basename +from email.mime.base import MIMEBase +from email import encoders +import ssl +import time +import random +from tEDataProj import settings + + +# tech@truenergy.by +# k7n2d3ZFZo4@CU5$4YDk + +# administrator@truenergy.by +# 6&#WfW8$qR2w8uv69e5$ + + +def fix_mailing_links_in_mail(html): + from GeneralApp.views import get_cur_domain + serv_domain, client_domain = get_cur_domain() + + while 'src="/media/' in html: + html = html.replace('src="/media/', f'src="{serv_domain}media/') + + return html + + +def mailing_direct_by_maillist(subject, from_email, to, html_content, attachments=None): + log = '' + for email in to: + res = admin_send_mail_by_SMTPlib(subject, from_email, email, html_content, attachments) + # print(str(res)) + log = '{0}
{1}'.format(log, str(res)) + time.sleep(random.randint(1, 5)) + + return log + + +def prepare_attach_file(filepath, filename=None): + try: + + if not filename: + filename = basename(filepath) + + if not settings.MEDIA_ROOT in filepath: + filepath = f'{settings.MEDIA_ROOT}{filepath}' + + with open(filepath, "rb") as fil: + part = MIMEApplication( + fil.read(), + Name=filename + ) + # After the file is closed + part['Content-Disposition'] = 'attachment; filename="%s"' % filename + except Exception as e: + msg = f'prepare_attach_file Error = {str(e)}' + techSendMail(msg, title='prepare_attach_file') + return msg + + return part + + +def prepare_xls_attach_by_xls_virtual_file(virtual_file, filename): + ctype = 'application/octet-stream' + maintype, subtype = ctype.split('/', 1) + # with open(filepath, 'rb') as fp: + file = MIMEBase(maintype, subtype) # Используем общий MIME-тип + file.set_payload(virtual_file) # Добавляем содержимое общего типа (полезную нагрузку) + # fp.close() + encoders.encode_base64(file) # Содержимое должно кодироваться как Base64 + + file.add_header('Content-Disposition', 'attachment', filename=filename) + + return file + + +def admin_send_mail_by_SMTPlib(subject, from_email, to, html_content, attachments=None): + res = None + + try: + # smtp_server = 'mail.cln.by' # 'mail.truenergy.by' + # smtp_port = 2525 # 587 + # smtp_password = 'clNdt6a8a' # u'98q3$IjxH%RUIxySw8R2' + # smtp_login = 'support@cln.by' # 'support@truenergy.by' + # from_email = smtp_login + + try: + smtp_server = 'mail.truenergy.by' + smtp_port = 587 + smtp_password = 'eg4$#95Xp0T*V%ig5BbR' + smtp_login = 'support@truenergy.by' + res = send_mail_by_SMTPlib(subject, from_email, to, html_content, smtp_server, smtp_port, smtp_login, + smtp_password, attachments) + except: + smtp_server = 'mail.truenergy.by' + smtp_port = 25 + smtp_password = 'PowH@aL0a4%$iz0Uo5V$' + smtp_login = 'tech@truenergy.by' + res = send_mail_by_SMTPlib(subject, smtp_login, to, html_content, smtp_server, 
smtp_port, smtp_login, + smtp_password, attachments) + + except Exception as e: + # from Baldenini_site.SMS_sender import send_SMS + # send_SMS(u'375296177827', u'send_mail_by_SMTPlib error = {0}'.format(str(e)), urgent=True) + msg = 'admin_send_mail_by_SMTPlib error = {0}'.format(str(e)) + print(msg) + # techSendMail(msg) + + return str(res) + + +def send_mail_by_SMTPlib(subject, from_email, to_init, html_content, smtp_server, smtp_port, smtp_login, smtp_password, + attachments=None): + to = to_init + if not prod_server: + to = 'web@syncsystems.net' + else: + to = to_init + try: + from settings_local import DEBUG + except: + print('get settings_local fail') + + res = None + mail_lib = None + + time.sleep(1) + + try: + # context = ssl.create_default_context() + + mail_lib = smtplib.SMTP(smtp_server, smtp_port) + + res = mail_lib.ehlo() + + res = mail_lib.starttls() # context=context) + # print('mail_lib.starttls = {0}'.format(str(res))) + + res = mail_lib.ehlo() + # print('mail_lib.ehlo = {0}'.format(str(res))) + + res = mail_lib.set_debuglevel = 2 + # print('mail_lib.set_debuglevel = {0}'.format(str(res))) + + res = mail_lib.esmtp_features['auth'] = 'LOGIN PLAIN' + # print('mail_lib.esmtp_features = {0}'.format(str(res))) + + res = mail_lib.login(smtp_login, smtp_password) + # print('mail_lib.login = {0}'.format(str(res))) + + res = None + + if type(to) in (list, tuple): + if 'support@truenergy.by' in to: + to.remove('support@truenergy.by') + + if len(to) > 1: + to_str = u', '.join(to) + else: + to_str = to[0] + else: + if to == 'support@truenergy.by': + return None + to_str = to + to = [] + to.append(to_str) + + if type(subject) != str: + try: + subject = subject.decode('utf-8') + except: + try: + subject = subject.encode('utf-8') + except: + pass + + msg = MIMEMultipart() + from email.headerregistry import Address + msg['From'] = from_email + msg['Reply-To'] = from_email + # msg['In-Reply-To'] = "email2@example.com" + msg['To'] = to_str + msg['Subject'] = 
subject + msg.attach(MIMEText(html_content, 'html', 'utf-8')) + + # print('attach message complete') + + if attachments: + if type(attachments) in (list, tuple): + try: + for item in attachments: + res = msg.attach(item) + # print('attach file complete = {0}'.format(str(res))) + except: + res = msg.attach(attachments) + # print('except attach file complete = {0}'.format(str(res))) + else: + res = msg.attach(attachments) + # print('else attach file complete = {0}'.format(str(res))) + + res = mail_lib.sendmail(from_email, to, msg.as_string()) + + msg = mail_lib.quit() + # print('mail_lib.quit = {0}'.format(str(msg))) + + except Exception as e: + # from Baldenini_site.SMS_sender import send_SMS + # send_SMS(u'375296177827', u'send_mail_by_SMTPlib error = {0}'.format(str(e)), urgent=True) + msg = 'send_mail_by_SMTPlib error = {0}'.format(str(e)) + print(msg) + try: + mail_lib.quit() + # print('mail_lib.quit = {0}'.format(str(msg))) + except: + pass + + try: + print(str(mail_lib.__dict__)) + except: + pass + + return msg + # techSendMail(msg) + + msg = 'send_mail_by_SMTPlib subj={3} init_to={2} to={0} res={1}'.format(str(to), str(res), str(to_init), + str(subject)) + print(msg) + + return msg + + +def sendMail(subject, text_content, from_email, to, html_content): + print('sendMail to {0}'.format(str(to))) + + admin_send_mail_by_SMTPlib(subject, from_email, [to], html_content) + + # msg = EmailMultiAlternatives(subject, text_content, from_email, [to]) + # msg.attach_alternative(html_content, "text/html") + # msg.send() + print(u'Accept') + return u'Accept' + + +def techSendMail_for_top_management(html_content, title=None): + try: + + # if not prod_server: + # msg = '{0}. 
Not sended because is local'.format(html_content) + # print(msg) + # return msg + from AuthApp.models import User + from django.db.models import Q + + # to = ['web@syncsystems.net'] + to = User.objects.filter( + Q(is_superuser=True) | Q(groups__name__in=[ + 'Отдел продаж: Начальник отдела продаж', 'Управляющий', + 'Бухгалтерия: Главный бухгалтер' + ]), + is_active=True, + is_staff=True + ).values_list('email', flat=True) + to = list(to) + to.append('office@truenergy.by') + + print('techSendMail_for_top_management') + if title: + subject = title + else: + subject = u'truEnergy Data техническое оповещение' + from_email = 'support@truenergy.by' + + res = admin_send_mail_by_SMTPlib(subject, from_email, to, html_content) + + # msg = EmailMultiAlternatives(subject, text_content, from_email, to) + # msg.attach_alternative(html_content, "text/html") + # msg.send() + print(res) + return u'Accept' + + except Exception as e: + msg = 'techSendMail_for_top_management error={0}'.format(str(e)) + techSendMail(msg) + print(msg) + + return 'Fail' + + +def techSendMail_for_specified_email_list(html_content, email_list, title=None): + try: + + print('techSendMail_for_specified_email_list') + if title: + subject = title + else: + subject = u'truEnergy Data техническое оповещение' + from_email = 'support@truenergy.by' + + res = admin_send_mail_by_SMTPlib(subject, from_email, email_list, html_content) + + print(res) + return u'Accept' + + except Exception as e: + msg = 'techSendMail_for_specified_email_list error={0}'.format(str(e)) + techSendMail(msg) + print(msg) + + return 'Fail' + + +def techSendMail(html_content, title=None, add_emails=None): + # if not prod_server: + # msg = '{0}. 
Not sended because is local'.format(html_content) + # print(msg) + # return msg + + print('techSendMail') + + try: + # subject = u'truEnergy Data техническое оповещение' + from_email = 'support@truenergy.by' + to = ['web@syncsystems.net'] + if add_emails: + to.extend(add_emails) + text_content = 'Technical message from truEnergy.' + + if title: + subject = title + else: + subject = u'truEnergy Data техническое оповещение' + + res = admin_send_mail_by_SMTPlib(subject, from_email, to, html_content) + + print(res) + + except Exception as e: + msg = 'techSendMail error={0}'.format(str(e)) + # techSendMail(msg) + print(msg) + + return u'Accept' diff --git a/BaseModels/messages.py b/BaseModels/messages.py new file mode 100644 index 0000000..c196666 --- /dev/null +++ b/BaseModels/messages.py @@ -0,0 +1,163 @@ +## -*- coding: utf-8 -*- +__author__ = 'SDE' +# from Baldenini_site.inter import jsonify + +def get_error_message_Dict(show_icon=None): + print('get_error_message_Dict') + Dict = { + 'form_style' : u'border-color: #FFBBBB; background-color: #FFEAEA;', + } + if show_icon: + Dict.update({ + 'form_icon' : 'canceled.png', + }) + + return Dict + + +def get_good_message_Dict(show_icon=None): + Dict = { + 'form_style' : u'border-color: #BBFFBB; background-color: #EAFFEA;', + } + + if show_icon: + Dict.update({ + 'form_icon' : 'accepted.png', + }) + + return Dict + + +def get_return_to_ready_but(): + return { + 'buttons' : u'
ГОТОВО
' + } + + +def get_return_to_choice_buts(but_ok_name, but_cancel_name): + return { + 'buttons' : u'
{0}
' + u'
{1}
'.format(but_ok_name, but_cancel_name) + } + + +def get_error_message(caption, text, show_icon=None): + + Dict = { + 'message' : text, + 'caption' : caption + } + Dict.update(get_error_message_Dict(show_icon)) + Dict.update(get_return_to_ready_but()) + + return Dict + + +# @jsonify() +def show_error_message(caption, text, show_icon=None): + from django.template.loader import render_to_string + + return {'error':'error', + 'html': render_to_string( + 'm_show_message.html', + get_error_message(caption, text, show_icon) + ) + } + +# @jsonify() +def show_good_message(caption, text): + from django.template.loader import render_to_string + + return {'html': render_to_string( + 'm_show_message.html', + get_good_message(caption, text) + ) + } + + +def show_good_message_ok_go_to_blank_page(caption, text, button_caption, url): + from django.template.loader import render_to_string + + return {'html': render_to_string( + 'm_show_message.html', + get_good_message_ok_go_to_blank_page(caption, text, button_caption, url) + ) + } + + +# def show_choice_message_w_input(caption, text, but_ok_name, but_cancel_name, form): +# from django.template.loader import render_to_string +# +# return {'html': render_to_string( +# 'Messages/m_show_message.html', +# get_choice_message(caption, text, but_ok_name, but_cancel_name) +# ) +# } + + +def show_choice_message_green(caption, text, but_ok_name, but_cancel_name, form=None): + from django.template.loader import render_to_string + + return {'html': render_to_string( + 'm_show_message.html', + get_choice_message(caption, text, but_ok_name, but_cancel_name, form, u'green') + ) + } + + +def show_choice_message_red(caption, text, but_ok_name, but_cancel_name, form=None): + from django.template.loader import render_to_string + + return {'html': render_to_string( + 'm_show_message.html', + get_choice_message(caption, text, but_ok_name, but_cancel_name, form, u'red') + ) + } + + +def get_choice_message(caption, text, but_ok_name, but_cancel_name, 
form=None, color=u'red', show_icon=None): + + Dict = { + 'message' : text, + 'caption' : caption, + 'form' : form + } + + if color == u'red': + Dict.update(get_error_message_Dict(show_icon)) + elif color == u'green': + Dict.update(get_good_message_Dict(show_icon)) + + Dict.update(get_return_to_choice_buts(but_ok_name, but_cancel_name)) + + return Dict + + +def get_but_ok_go_to_blank_page(button_caption, url): + return { + 'buttons' : u'

{1}

'.format(url,button_caption) + } + + +def get_good_message_ok_go_to_blank_page(caption, text, button_caption, url, show_icon=None): + + Dict = { + 'message' : text, + 'caption' : caption + } + Dict.update(get_good_message_Dict(show_icon)) + Dict.update(get_but_ok_go_to_blank_page(button_caption, url)) + + return Dict + + +def get_good_message(caption, text, show_icon=None): + + Dict = { + 'message' : text, + 'caption' : caption + } + Dict.update(get_good_message_Dict(show_icon)) + Dict.update(get_return_to_ready_but()) + + return Dict \ No newline at end of file diff --git a/BaseModels/middlewares/web_requests_middleware.py b/BaseModels/middlewares/web_requests_middleware.py new file mode 100644 index 0000000..82d55da --- /dev/null +++ b/BaseModels/middlewares/web_requests_middleware.py @@ -0,0 +1,21 @@ +from datetime import datetime + + +class WebRequestMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + + response = self.get_response(request) + + if not '/v1/alert/' in request.path: + user = getattr(request, 'user', None) + if user and not user.is_anonymous and user.user_profile: + user.user_profile.last_web_request = datetime.now() + user.user_profile.save(update_fields=['last_web_request']) + + # from GeneralApp.temp_data_funcs import add_element_in_tmp_data_list + # add_element_in_tmp_data_list('user_activity', user.email, 'activities_DT', str(user.user_profile.last_web_request)) + + return response diff --git a/BaseModels/office_documents_utils.py b/BaseModels/office_documents_utils.py new file mode 100644 index 0000000..d97a2cd --- /dev/null +++ b/BaseModels/office_documents_utils.py @@ -0,0 +1,211 @@ +import copy + +from openpyxl import Workbook +from django.http import HttpResponse +from openpyxl.writer.excel import save_virtual_workbook +from openpyxl.utils import get_column_letter +from BaseModels.mailSender import techSendMail +from openpyxl.styles import PatternFill, Font, Alignment 
+from openpyxl.styles.borders import Border, Side +from openpyxl.styles.numbers import BUILTIN_FORMATS +from colorsys import rgb_to_hls + + +def pairwise(iterable): + a = iter(iterable) + return zip(a, a) + + +options_params_splitter = '<|>' + + +def set_col_options(ws, row, col, rows_len, options): + if type(options) == str: + options = dict(item.split("=")[::1] for item in options.split('&')) + + cols_set = 1 + if options: + + exists_group_option_for_column = False + for key in options.keys(): + if key.startswith('g_col_'): + exists_group_option_for_column = True + break + + if 'cols_merge' in options and options['cols_merge']: + cols_set = int(options['cols_merge']) + if cols_set > 1: + ws.merge_cells(start_row=row, start_column=col, end_row=row, end_column=col + cols_set - 1) + + if exists_group_option_for_column: + g_col_back_color = None + if 'g_col_back_color' in options and options['g_col_back_color']: + g_col_back_color = options['g_col_back_color'] + g_col_num_w_sep = None + if 'g_col_num_w_sep' in options and options['g_col_num_w_sep']: + g_col_num_w_sep = options['g_col_num_w_sep'] + + cur_col = col + while cur_col < col + cols_set: + cur_row = row + while cur_row < rows_len: + if g_col_back_color: + ws.cell(row=cur_row, column=cur_col).fill = PatternFill('solid', fgColor=g_col_back_color) + if g_col_num_w_sep: + ws.cell(row=cur_row, column=cur_col).number_format = '#,##0.00' + cur_row += 1 + cur_col += 1 + + if 'col_show_total' in options and options['col_show_total']: + ws.cell(row=rows_len, column=col).font = Font(bold=True) + ws.cell(row=rows_len, column=col).value = "=SUM({0}{1}:{0}{2})".format( + get_column_letter(col), + row + 1, + rows_len - 1 + ) + ws.cell(row=rows_len, column=col).number_format = '#,##0.00' + + if 'back_color' in options and options['back_color']: + ws.cell(row=row, column=col).fill = PatternFill('solid', fgColor=options['back_color']) + + if 'bold' in options and options['bold']: + ws.cell(row=row, column=col).font = 
Font(bold=True) + + if 'col_bold' in options and options['col_bold']: + cur_col = col + while cur_col < col + cols_set: + cur_row = row + while cur_row < rows_len: + ws.cell(row=cur_row, column=cur_col).font = Font(bold=True) + cur_row += 1 + cur_col += 1 + + return cols_set + + +def add_table_in_workbook(work_sheet, data, convert_minus_to_null=False, headers_rows_count=0): + thin_border = Border(left=Side(style='thin'), + right=Side(style='thin'), + top=Side(style='thin'), + bottom=Side(style='thin')) + + r = 1 + + for row in data: + + try: + + c = 1 + cols = row + if type(data) == dict: + cols = row.values() + for val in cols: + options = None + + inc_c = 1 + + work_sheet.cell(row=r, column=c).border = thin_border + + # получаем опции + if type(val) == str: + val_w_options = val.split(options_params_splitter) + if len(val_w_options) > 1: + val = val_w_options[0] + # применяем опции + inc_c = set_col_options(work_sheet, row=r, col=c, rows_len=len(data) + 1, + options=val_w_options[1]) + elif type(val) == dict: + inc_c = set_col_options(work_sheet, row=r, col=c, rows_len=len(data) + 1, options=val) + val = val['val'] + + # если стоит опция "минусовые значения преобразовывать в нулевые" + if convert_minus_to_null: + try: + if val < 0: + val = 0 + except: + pass + + try: + work_sheet.cell(row=r, column=c).value = val + except: + work_sheet.cell(row=r, column=c).value = str(val) + + c += inc_c + + except Exception as e: + msg = f'add_table_in_workbook in row {str(r)} ERROR = {str(e)}' + print(msg) + + r += 1 + + try: + + dims = {} + row_c = 0 + for row in work_sheet.rows: + # не подгоняем данные под надписи в хэдере + if row_c < headers_rows_count: + row_c += 1 + continue + + for cell in row: + if cell.value: + dims[cell.column] = max((dims.get(cell.column, 0), len(str(cell.value)))) + + row_c += 1 + + for col, value in dims.items(): + if value > 150: + value = 150 + if value < 3: + value = 3 + work_sheet.column_dimensions[get_column_letter(col)].width = value + + 
except Exception as e: + msg = f'add_table_in_workbook in sets width ERROR = {str(e)}' + print(msg) + + return + + +def get_xls_file_by_data_list(data, convert_minus_to_null=False): + try: + + wb = Workbook() + ws = wb.active + + if type(data) == list and len(data) and type(data[0]) == dict: + + i = 0 + for page in data: + + title = None + if 'title' in page: + title = page['title'] + + # если первая страница - она уже создана, просто переименовываем + if i == 0: + if title: + ws.title = title + else: + ws = wb.create_sheet(title) + + headers_rows_count = 0 + if 'headers_rows_count' in page: + headers_rows_count = page['headers_rows_count'] + + add_table_in_workbook(ws, page['table'], convert_minus_to_null, headers_rows_count) + + i += 1 + else: + add_table_in_workbook(ws, data, convert_minus_to_null) + + xls_file = save_virtual_workbook(wb) + + return xls_file + + except Exception as e: + msg = str(e) + print(msg) + return msg diff --git a/BaseModels/paging.py b/BaseModels/paging.py new file mode 100644 index 0000000..d1f8168 --- /dev/null +++ b/BaseModels/paging.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- +__author__ = 'SDE' + +def get_paging_Dict(request, elements_count, elements_on_page, from_page, to_page=None): + + + + pages_count = elements_count / elements_on_page + if elements_count % elements_on_page > 0: + pages_count = pages_count + 1 + + pages = [] + + if to_page: + cur_page = to_page + else: + cur_page = from_page + + # количство страниц, которое отображается в ряд (без разделенного пэйджинга) + pages_wo_separated_paging = 20 + + if pages_countpages_count+1-pages_count_wo_separate_for_side: + mid_left_page = (pages_count+1-pages_count_for_begin_end_paging-pages_count_wo_separate_for_side) / 2 #считаем сколько страниц внутри точек + mid_left_page = pages_count_for_begin_end_paging + mid_left_page # это и есть средняя страница в левой части + pages.append((u'...',mid_left_page)) + for p in 
range(pages_count+1-pages_count_wo_separate_for_side,pages_count-pages_count_for_begin_end_paging+1): + pages.append((p,p)) + # elif num_current_page>pages_count-6: + # mid_left_page = (pages_count -2 -7) / 2 #считаем сколько страниц внутри точек + # mid_left_page = 3 + mid_left_page # это и есть средняя страница в левой части + # pages.append((u'...',mid_left_page)) + # pages.append((pages_count-6,pages_count-6)) + # pages.append((pages_count-5,pages_count-5)) + # pages.append((pages_count-4,pages_count-4)) + # pages.append((pages_count-3,pages_count-3)) + else: + mid_page = cur_page + + mid_left_page = (mid_page-1 -pages_count_for_begin_end_paging)/2 + pages_count_for_begin_end_paging + mid_rigth_page = (pages_count - (mid_page+pages_count_for_begin_end_paging))/2 + mid_page+1 + + # количество страниц, которые добавляются слева и и справа от текущей центральной + pages_count_for_add_to_left_and_right_from_current_central_page = center_pages_count / 2 + + pages.append((u'...',mid_left_page)) + for p in range(mid_page-pages_count_for_add_to_left_and_right_from_current_central_page,mid_page+pages_count_for_add_to_left_and_right_from_current_central_page+1): + pages.append((p,p)) + pages.append((u'...',mid_rigth_page)) + # mid_left_page = (mid_page-1 -3)/2 + 3 + # mid_rigth_page = (pages_count - (mid_page+1 +2))/2 + mid_page+1 + # + # pages.append((u'...',mid_left_page)) + # pages.append((mid_page-1,mid_page-1)) + # pages.append((mid_page,mid_page)) + # pages.append((mid_page+1,mid_page+1)) + # pages.append((u'...',mid_rigth_page)) + + + for p in range(pages_count+1-pages_count_for_begin_end_paging,pages_count+1): + pages.append((p,p)) + # pages.append((pages_count-2,pages_count-2)) + # pages.append((pages_count-1,pages_count-1)) + # pages.append((pages_count,pages_count)) + if not to_page: + to_page = from_page + + return { + 'paging' : pages, + 'from_page' : from_page, + 'to_page' : to_page, + 'max_page' : pages_count, + } diff --git 
# --- BaseModels/pil_graphic_utils.py ---
# -*- coding: utf-8 -*-

__author__ = 'SDE'


import os


def get_thumbnail_url(image_url, size=150):
    """Return *image_url* with a 'thumbs_<size>' directory spliced in before
    the file name (pure string manipulation, no filesystem access)."""
    thumbs_part = 'thumbs_' + str(size)
    dir_part, file_part = image_url.rsplit('/', 1)
    return dir_part + '/' + thumbs_part + '/' + file_part


def get_thumbnail_path(image_path, size=150):
    """Return the on-disk path of the thumbnail for *image_path*, creating
    the 'thumbs_<size>' directory next to the image if it does not exist."""
    thumbs_dir = 'thumbs_' + str(size)
    dirname, filename = os.path.split(image_path)
    dirname = os.path.join(dirname, thumbs_dir)
    if not os.path.exists(dirname):
        os.mkdir(dirname, mode=0o755)
    return os.path.join(dirname, filename)


def create_thumbnail(image_path, size=150):
    """(Re)create a size x size bounding-box thumbnail for *image_path*."""
    # Local import keeps the module importable without Pillow installed.
    from PIL import Image

    thumb_path = get_thumbnail_path(image_path, size)
    delete_thumbnail(image_path, size)
    img = Image.open(image_path)
    # BUGFIX: Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the same
    # filter under its modern name.
    img.thumbnail((size, size), Image.LANCZOS)
    img.save(thumb_path)


def delete_thumbnail(image_path, size=150):
    """Remove the thumbnail for *image_path* if it exists."""
    thumb_path = get_thumbnail_path(image_path, size)
    if os.path.exists(thumb_path):
        os.remove(thumb_path)


# --- BaseModels/search_funcs.py ---
try:
    import settings_local

    pg_fts_config = 'pg_catalog.russian'  # 'public.mipp_fulltext'
except ImportError:
    pg_fts_config = 'pg_catalog.russian'

from django.db import models
from django.contrib.postgres.search import Value, Func
import copy


def get_list_words_contains_nums(txt):
    """Return the words of *txt* (a string or a pre-split word list) that
    contain at least one digit character."""
    # `numbers` is the project-level collection of digit characters.
    from .inter import numbers

    words = txt.split(' ') if isinstance(txt, str) else txt

    res_words = []
    for word in words:
        if any(ch in numbers for ch in word):
            res_words.append(word)

    return res_words


def get_list_split_words_w_nums(txt):
    """Split every word of *txt* at digit/non-digit boundaries and return the
    flat list of fragments, e.g. 'ab12cd' -> ['ab', '12', 'cd']."""
    from .inter import numbers

    if isinstance(txt, str):
        words = txt.split(' ')
    else:
        words = txt

    words_w_devided_nums = []
    for word in copy.copy(words):

        i = 0
        is_number = False
        cut_piece_compete = False

        while i < len(word):
            if i == 0:
                # remember whether the current fragment starts with a digit
                is_number = word[i] in numbers
            else:
                # a digit <-> non-digit transition closes the fragment
                if word[i] in numbers:
                    if not is_number:
                        cut_piece_compete = True
                else:
                    if is_number:
                        cut_piece_compete = True

            if cut_piece_compete:
                cut_piece_compete = False
                words_w_devided_nums.append(word[0:i])
                # restart scanning on the remainder of the word
                word = word[i:]
                i = 0
            else:
                i += 1

        if i > 0:
            words_w_devided_nums.append(word[0:i])

    return words_w_devided_nums


class Headline(Func):
    """PostgreSQL ts_headline() SQL function expression for Django ORM."""
    function = 'ts_headline'

    def __init__(self, field, query, config=None, options=None, **extra):
        expressions = [field, query]
        if config:
            expressions.insert(0, Value(config))
        if options:
            expressions.append(Value(options))
        extra.setdefault('output_field', models.TextField())
        super(Headline, self).__init__(*expressions, **extra)


def get_search_lexems_list(search_phrase):
    """Run *search_phrase* through PostgreSQL plainto_tsquery() and return the
    resulting lexems as a list of strings (None if nothing was produced)."""
    from django.db import connection
    search_lexems_list = None

    cursor = connection.cursor()
    cursor.execute("SET NAMES 'UTF8';")
    # SECURITY FIX: the phrase used to be str.format()-ed straight into the
    # SQL text -- an injection hole for user-supplied search input.  Pass it
    # as a bound parameter instead (pg_fts_config is a module-level constant).
    cursor.execute("SELECT plainto_tsquery('{0}', %s);".format(pg_fts_config),
                   [search_phrase])
    search_lexems = cursor.fetchone()
    s = search_lexems[0]

    if search_lexems:
        search_lexems = s.replace('\\', '')
        search_lexems = search_lexems.replace("'", '')
        search_lexems = search_lexems.replace(" ", '')
        search_lexems_list = search_lexems.split('&')

    return search_lexems_list


# --- BaseModels/search_optimization/OEMBED/OEMBED_data.py ---
# https://oembed.com/
# https://habr.com/ru/post/141303/

# --- BaseModels/search_optimization/RSS/ya_RSS_chanel.py ---
# https://yandex.ru/dev/turbo/doc/quick-start/articles.html

# --- BaseModels/search_optimization/google_AMP/AMP.py ---
# https://amp.dev/ru/
# https://www.seonews.ru/analytics/optimization-2020-vnedrenie-amp-dlya-internet-magazina-bez-poteri-konversii-v-google/

# --- BaseModels/search_optimization/google_AMP/web_story_code.py ---
# https://developers.google.com/search/docs/advanced/appearance/enable-web-stories?hl=ru#google-discover
# https://developers.google.com/search/docs/beginner/seo-starter-guide?hl=ru#understand_your_content

# --- BaseModels/search_optimization/ld_json/ld_ speakebale.py ---
import json


def get_ld_speakebale(name, theme_xpath, info_xpath, url):
    """Return a schema.org WebPage dict with a SpeakableSpecification pointing
    at the two xPaths (headline and body) that assistants may read aloud."""
    data = {
        "@context": "https://schema.org/",
        "@type": "WebPage",
        "name": name,
        "speakable": {
            "@type": "SpeakableSpecification",
            "xPath": [
                theme_xpath,
                info_xpath
            ]
        },
        "url": url
    }

    return data


# --- BaseModels/search_optimization/ld_json/ld_FAQ.py ---
def get_ld_FAQ(data_Dict):
    """Return a schema.org FAQPage dict built from a {question: answer}
    mapping."""
    FAQ_list = []
    for question, answer in data_Dict.items():
        FAQ_list.append({
            "@type": "Question",
            "name": question,
            "acceptedAnswer": {
                "@type": "Answer",
                "text": answer
            }
        })

    return {
        "@context": "https://schema.org",
        "@type": "FAQPage",
        "mainEntity": FAQ_list
    }


# --- BaseModels/search_optimization/ld_json/ld_QA.py ---
def get_ld_QA(data_Dict):
    """Return a schema.org QAPage dict.

    NOTE(review): this currently returns the schema.org *sample* payload
    verbatim; data_Dict is accepted but ignored -- the real mapping has not
    been implemented yet.
    """
    data = {
        "@context": "https://schema.org",
        "@type": "QAPage",
        "mainEntity": {
            "@type": "Question",
            "name": "How many ounces are there in a pound?",
            "text": "I have taken up a new interest in baking and keep running across directions in ounces and pounds. I have to translate between them and was wondering how many ounces are in a pound?",
            "answerCount": 3,
            "upvoteCount": 26,
            "acceptedAnswer": {
                "@type": "Answer",
                "text": "1 pound (lb) is equal to 16 ounces (oz).",
                "upvoteCount": 1337,
                "url": "https://example.com/question1#acceptedAnswer"
            },
            "suggestedAnswer": [
                {
                    "@type": "Answer",
                    "text": "Are you looking for ounces or fluid ounces? If you are looking for fluid ounces there are 15.34 fluid ounces in a pound of water.",
                    "upvoteCount": 42,
                    "url": "https://example.com/question1#suggestedAnswer1"
                }, {
                    "@type": "Answer",
                    "text": " I can't remember exactly, but I think 18 ounces in a lb. You might want to double check that.",
                    "upvoteCount": 0,
                    "url": "https://example.com/question1#suggestedAnswer2"
                }
            ]
        }
    }

    return data


# --- BaseModels/search_optimization/ld_json/ld_article_news.py ---
def get_ld_article_news(art_name, art_txt, art_DT, url_data):
    """Return a schema.org NewsArticle JSON string for an article.

    Project/django imports are function-local so the module stays importable
    in isolation; the original's unused ``from project_sets import *`` was
    dropped.
    """
    import project_sets
    from django.urls import reverse
    from django.utils.html import strip_tags
    from BaseModels.inter import get_all_photos_from_html_content

    img_list = get_all_photos_from_html_content(art_txt)
    if img_list:
        img_list = list(map(lambda img: "{0}{1}".format(project_sets.domain, img), img_list))

    data = {
        "@context": "https://schema.org",
        "@type": "NewsArticle",
        "url": "{0}{1}".format(project_sets.domain, reverse(**url_data)),
        "publisher": {
            "@type": "Organization",
            "name": project_sets.company_name,
            "logo": project_sets.logo
        },
        "author": {
            "@type": "Organization",
            "name": project_sets.company_name,
            "logo": project_sets.logo,
            "url": project_sets.domain,
        },
        "headline": art_name,
        # "mainEntityOfPage": "...",  # link to the source page
        "articleBody": strip_tags(art_txt),
        "datePublished": art_DT.isoformat()
    }
    if img_list:
        data.update({
            'image': img_list
        })

    return json.dumps(data)


# --- BaseModels/search_optimization/ld_json/ld_breadcrambs.py ---
def get_ld_breadcrambs(items_list):
    """Return a schema.org BreadcrumbList JSON string.

    Items may be plain names (str), {'name','url'} dicts, or objects with
    .name/.url attributes.  Per the schema, the last crumb (the current page)
    carries no "item" URL.
    """
    elements_list = []
    for position, item in enumerate(items_list, start=1):
        url = None
        if isinstance(item, str):
            name = item
        elif isinstance(item, dict):
            name = item['name']
            url = item['url']
        else:
            name = item.name
            url = item.url

        element = {
            "@type": "ListItem",
            "position": position,
            "name": name,
        }
        if position < len(items_list):
            element.update({
                "item": url
            })

        elements_list.append(element)

    data = {
        "@context": "https://schema.org",
        "@type": "BreadcrumbList",
        "itemListElement": elements_list
    }

    return json.dumps(data)


# --- BaseModels/search_optimization/ld_json/ld_company.py ---
def get_ld_logo():
    """Minimal schema.org Organization logo markup (dict)."""
    import project_sets
    data = {
        "@context": "https://schema.org",
        "@type": "Organization",
        "url": project_sets.domain,
        "logo": project_sets.logo
    }
    return data


def get_ld_company(offices):
    """Return a schema.org LocalBusiness JSON string for the company.

    *offices* is an iterable/queryset of office objects; the one flagged
    main_office=True (or the first one) supplies the markup.  The other
    offices are collected as departments but intentionally not emitted yet.
    """
    import project_sets

    try:
        main_office = offices.get(main_office=True)
    except Exception:
        main_office = offices[0]

    data = {
        "@context": "https://schema.org",
        "@type": "LocalBusiness",
        "logo": project_sets.logo,
    }

    data.update(get_ld_office(main_office))

    # Department markup is prepared but deliberately disabled for now.
    departments = []
    for office in offices:
        if office == main_office:
            continue
        departments.append(get_ld_office(office))

    # if departments:
    #     data.update({
    #         'department': departments
    #     })

    return json.dumps(data)


def get_ld_office(office):
    """Return a schema.org LocalBusiness fragment (dict) for one office."""
    import project_sets

    try:
        phones = office.phones()
    except Exception:
        phones = []

    if not phones:
        try:
            phones = office.rel_contacts_for_office
        except Exception:
            phones = []

    data = {
        "name": office.name,
    }

    # Google requires at least one crawlable, indexable image per marked-up
    # page (ideally high resolution, >= 50,000 px area, in 16x9, 4x3 and 1x1
    # aspect ratios); we supply the site logo.
    data.update({
        "image": [
            project_sets.logo,
        ]
    })

    # Optionally a more specific @type could be set here (Store, Dentist,
    # FoodEstablishment, ... -- see the LocalBusiness subtypes on schema.org).

    i_Dict = {
        "address": {
            "@type": "PostalAddress",
            "streetAddress": office.address,
            "addressLocality": office.city,
            # "addressRegion": "CA",
            # "postalCode": "95129",
            # "addressCountry": "US"
        },
    }
    if phones:
        # NOTE: 'nomber_phone' (sic) is the field name on the contact model.
        i_Dict["address"].update({
            "telephone": '{0}{1}'.format(phones[0].prefix, phones[0].nomber_phone),
        })

    data.update(i_Dict)

    # Office coordinates, falling back to the project-wide defaults.
    gps_longitude = getattr(office, 'gps_longitude', None)
    gps_latitude = getattr(office, 'gps_latitude', None)
    if not gps_longitude:
        gps_longitude = getattr(project_sets, 'gps_longitude', None)
    if not gps_latitude:
        gps_latitude = getattr(project_sets, 'gps_latitude', None)
    if gps_longitude and gps_latitude:
        data.update({
            "geo": {
                "@type": "GeoCoordinates",
                "latitude": gps_latitude,
                "longitude": gps_longitude
            },
        })

    data.update({
        "url": project_sets.domain
    })

    # "foundingDate": "2005-02-07",  # founding date (not filled in yet)

    # BUGFIX: getattr() had no default here, so a project_sets module without
    # company_reference_links raised AttributeError.
    company_reference_links = getattr(project_sets, 'company_reference_links', None)
    if company_reference_links:
        data.update({
            "sameAs": company_reference_links
        })

    priceRange = getattr(office, 'priceRange', '$')
    if priceRange:
        data.update({
            "priceRange": priceRange
        })

    # Working hours, office-level with project-wide fallbacks.
    work_time_from = getattr(office, 'work_time_from', None)
    if not work_time_from:
        work_time_from = getattr(project_sets, 'work_time_from', '9:00')
    work_time_to = getattr(office, 'work_time_to', None)
    if not work_time_to:
        work_time_to = getattr(project_sets, 'work_time_to', '18:00')

    data.update({
        "openingHoursSpecification": [
            {
                "@type": "OpeningHoursSpecification",
                "dayOfWeek": [
                    "https://schema.org/Monday",
                    "https://schema.org/Tuesday",
                    "https://schema.org/Wednesday",
                    "https://schema.org/Thursday",
                    "https://schema.org/Friday",
                    # "https://schema.org/Saturday"
                ],
                "opens": work_time_from,
                "closes": work_time_to
            },
        ],
    })

    return data
100644 index 0000000..0b3cf12 --- /dev/null +++ b/BaseModels/search_optimization/ld_json/ld_media_video_object.py @@ -0,0 +1,53 @@ +import json +import re + +import project_sets +from django.urls import reverse +from django.utils.html import strip_tags + + +def create_videoobject(video_path, name, description, DT): + video_id = video_path.split('/')[-1] + thumbs = list(map(lambda s: "https://img.youtube.com/vi/{0}/{1}.jpg".format(video_id, str(s)), range(1, 5))) + + data = { + "@context": "https://schema.org", + "@type": "VideoObject", + "name": name, + "description": description, + "thumbnailUrl": thumbs, + "uploadDate": DT.isoformat(), + # "duration": "PT1M54S", # продолжительность видео + # "contentUrl": "https://www.example.com/video/123/file.mp4", # адрес к видеофайлу + "embedUrl": video_path, + # "interactionStatistic": { # количество просмотров + # "@type": "InteractionCounter", + # "interactionType": { "@type": "WatchAction" }, + # "userInteractionCount": 5647018 + # }, + # "regionsAllowed": "US,NL" # разрешенные регионы + } + return data + + +def get_ld_videoobjects_for_page_html(obj, name, description, DT, content): + from BaseModels.inter import get_all_videos_from_html_content + res_list = [] + + if obj.video: + data = create_videoobject(obj.video, name, description, DT) + res_list.append(json.dumps(data)) + + if not content: + return res_list + + videos_list = get_all_videos_from_html_content(content) + # if videos_list: + # img_list = list(map(lambda img: "{0}{1}".format(project_sets.domain, img), videos_list)) + + for video_path in videos_list: + if not video_path in obj.video and not obj.video in video_path: + data = create_videoobject(video_path, name, description, DT) + res_list.append(json.dumps(data)) + + return res_list diff --git a/BaseModels/search_optimization/ld_json/ld_product.py b/BaseModels/search_optimization/ld_json/ld_product.py new file mode 100644 index 0000000..d2e67a1 --- /dev/null +++ 
b/BaseModels/search_optimization/ld_json/ld_product.py @@ -0,0 +1,178 @@ +# import json +# +# import project_sets +# from BaseModels.functions import add_domain +# +# +# def get_ld_shipping_data_for_product(shipping_terms): +# shipping_terms_list = [] +# for item in shipping_terms: +# data = { +# "@type": "OfferShippingDetails", +# "shippingRate": { +# "@type": "MonetaryAmount", +# "value": item.price, +# "currency": project_sets.base_currency +# }, +# "shippingDestination": { +# "@type": "DefinedRegion", +# "addressCountry": project_sets.shipping_region, # обязательно +# # "postalCodeRange": { +# # "postalCodeBegin": "98100", +# # "postalCodeEnd": "98199" +# # } +# }, +# "deliveryTime": { +# "@type": "ShippingDeliveryTime", +# "cutOffTime": project_sets.cutOffTime, # "19:30-08:00", +# +# # Стандартное время от получения оплаты до отправки товаров со склада (или подготовки к самовывозу, если используется такой вариант) +# "handlingTime": { +# "@type": "QuantitativeValue", +# "minValue": "0", # дней +# "maxValue": "1" # дней +# }, +# # Стандартное время от отправки заказа до его прибытия к конечному покупателю. 
+# "transitTime": { +# "@type": "QuantitativeValue", +# "minValue": "1", # дней +# "maxValue": "5" # дней +# }, +# # Время, после которого новые заказы не обрабатываются в тот же день +# +# # Дни недели, по которым вы обрабатываете заказы +# "businessDays": { +# "@type": "OpeningHoursSpecification", +# "dayOfWeek": ["https://schema.org/Monday", "https://schema.org/Tuesday", +# "https://schema.org/Wednesday", "https://schema.org/Thursday"] +# } +# } +# } +# +# shipping_terms_list.append(data) +# +# data = { +# "shippingDetails": shipping_terms_list +# } +# +# return data +# +# +# def get_ld_offers_for_product(product, domain, shipping_terms): +# data = { +# "offers": { +# "@type": "Offer", +# "url": '{0}{1}'.format(domain, product.get_site_url()), +# "itemCondition": "https://schema.org/NewCondition", +# # "https://schema.org/NewCondition" +# # "https://schema.org/UsedCondition" +# "availability": "https://schema.org/InStock", +# # https://schema.org/BackOrder +# # https://schema.org/Discontinued +# # https://schema.org/InStock +# # https://schema.org/InStoreOnly +# # https://schema.org/LimitedAvailability +# # https://schema.org/OnlineOnly +# # https://schema.org/OutOfStock +# # https://schema.org/PreOrder +# # https://schema.org/PreSale +# # https://schema.org/SoldOut +# "price": str(product.price), +# "priceCurrency": project_sets.base_currency, +# # "priceValidUntil": "2020-11-20", #дата окончания действия цены +# # "shippingSettingsLink": '{0}{1}'.format(project_sets.domain, 'delivery/'), +# +# }, +# } +# +# if shipping_terms: +# data["offers"].update(get_ld_shipping_data_for_product(shipping_terms)) +# +# return data +# +# +# def get_aggregate_rating(product): +# data = { +# # "review": { +# # "@type": "Review", +# # "reviewRating": { +# # "@type": "Rating", +# # "ratingValue": "4", +# # "bestRating": "5" +# # }, +# # "author": { +# # "@type": "Person", +# # "name": "Fred Benson" +# # } +# # }, +# "aggregateRating": { +# "@type": "AggregateRating", +# 
"ratingValue": product.ratingValue, +# "reviewCount": product.reviewCount +# } +# } +# +# return data +# +# +# def get_ld_product(product, domain, shipping_terms): +# from GeneralApp.views import get_cur_domain +# serv_domain, local_domain = get_cur_domain() +# +# data = { +# "@context": "https://schema.org/", +# "@type": "Product", +# "name": product.name, +# "sku": '{0}-{1}'.format(str(product.brand), str(product.article)), +# "url": '{0}{1}'.format(domain, product.get_site_url()), +# } +# +# if product.description: +# data.update({ +# "description": product.description, +# }) +# +# barcode = getattr(product, 'barcode', None) +# if barcode: +# data.update({ +# "gtin14": barcode, +# }) +# +# gallery = getattr(product, 'gallery', None) +# if gallery: +# try: +# photos = gallery.get_photos() +# photos = list(map(lambda ph: '{0}{1}'.format(serv_domain, ph), photos)) +# except Exception as e: +# photos = None +# +# if photos: +# data.update({ +# "image": photos, +# }) +# +# brand = getattr(product, 'brand', None) +# if brand: +# if type(brand) not in [str]: +# brand = brand.name +# +# data.update({ +# "brand": { +# "@type": "Brand", +# "name": brand +# }, +# }) +# +# FAQ = {} +# +# from ... 
+# +# aggregate_rating = getattr(product, 'ratingValue', None) +# if aggregate_rating != None: +# data.update(get_aggregate_rating(product)) +# +# price = getattr(product, 'price', None) +# if price: +# data.update(get_ld_offers_for_product(product, domain, shipping_terms)) +# +# return json.dumps(data) diff --git a/BaseModels/search_optimization/ld_json/ld_search.py b/BaseModels/search_optimization/ld_json/ld_search.py new file mode 100644 index 0000000..9161def --- /dev/null +++ b/BaseModels/search_optimization/ld_json/ld_search.py @@ -0,0 +1,22 @@ +import json +import project_sets + +def get_ld_search(domain): + + # Только для главной страницы + + data = { + "@context": "https://schema.org", + "@type": "WebSite", + "url": domain, #"https://truenergy.by/", + "potentialAction": { + "@type": "SearchAction", + "target": { + "@type": "EntryPoint", + "urlTemplate": "{domain}/{search_term_string}/".format(domain=domain, search_term_string='{search_term_string}') + }, + "query-input": "required name=search_term_string" + } + } + + return json.dumps(data) \ No newline at end of file diff --git a/BaseModels/search_optimization/ld_json/ld_vacancy.py b/BaseModels/search_optimization/ld_json/ld_vacancy.py new file mode 100644 index 0000000..7a5996c --- /dev/null +++ b/BaseModels/search_optimization/ld_json/ld_vacancy.py @@ -0,0 +1,140 @@ +import datetime +import project_sets + + +def get_ld_vacancies(data_Dict): + + # Разметку JobPosting можно размещать только на страницах, которые содержат одно объявление о вакансии. + # Не разрешается добавлять разметку JobPosting на какие-либо другие страницы, в том числе те, на которых нет информации ни об одной вакансии. 
+ + vacancies_list = [] + + for item in data_Dict: + data = { + "@context": "https://schema.org/", + "@type": "JobPosting", + "title": item['title'], + "description": item['description'], + "datePosted": datetime.datetime.now().strftime('%Y-%m-%d'), + "validThrough": item['validThrough'].strftime('%Y-%m-%dT%H:%M'), #"2017-03-18T00:00", # окончание срока действия + "identifier": { + "@type": "PropertyValue", + "name": project_sets.company_name, + "value": str(item['id']) + }, + "hiringOrganization": { + "@type": "Organization", + "name": project_sets.company_name, + "sameAs": project_sets.domain, + "logo": project_sets.logo + }, + } + + if 'office' in item: + # используется для указания места, в котором сотрудник будет выполнять работу. Если определенного места (например, офиса или производственной площадки) нет, использовать это свойство не обязательно. + job_place_Dict = { + "jobLocation": { + "@type": "Place", + "address": { + "@type": "PostalAddress", + "streetAddress": item['office'].address, + "addressLocality": item['office'].city, + "addressCountry": "BY" + }, + }, + } + else: + job_place_Dict = { + "jobLocationType": "TELECOMMUTE" # только удаленка + } + data.update(job_place_Dict) + + if 'required_country_of_residence' in item: + # используется для указания территории, на которой может проживать кандидат на должность. 
Необходимо, чтобы была задана по меньшей мере одна страна + required_country_of_residence = { + "applicantLocationRequirements": { + "@type": "Country", + "name": item['required_country_of_residence']['country'] + }, + } + data.update(required_country_of_residence) + + if 'salary' in item: + salary_Dict = { + "baseSalary": { + "@type": "MonetaryAmount", + "currency": item['salary']['currency'], + "value": { + "@type": "QuantitativeValue", + "unitText": item['salary']['time_unit'] + # HOUR + # DAY + # WEEK + # MONTH + # YEAR + } + } + } + if 'price' in item['salary']: + salary_Dict['baseSalary']['value']['value'] = item['salary']['price'] + elif 'price_from' in item['salary']: + salary_Dict['baseSalary']['value']['minValue'] = item['salary']['price_from'] + + if 'price_to' in item['salary']: + salary_Dict['baseSalary']['value']['maxValue'] = item['salary']['price_to'] + + data.update(salary_Dict) + + # Указание на то, поддерживается ли на странице с объявлением о вакансии отправка резюме напрямую. 
+ data.update({ + 'directApply': item['directApply'] + }) + + # Вид занятости Укажите одно или несколько значений + if 'employmentType' in item: + # FULL_TIME + # PART_TIME + # CONTRACTOR + # TEMPORARY + # INTERN + # VOLUNTEER + # PER_DIEM + # OTHER + data.update({ + 'employmentType': item['employmentType'] + }) + + if 'educationRequirements' in item: + e_Dict = { + "educationRequirements": { + "@type": "EducationalOccupationalCredential", + "credentialCategory": item['educationRequirements'] + # high school + # associate degree + # bachelor degree + # professional certificate + # postgraduate degree + }, + } + data.update(e_Dict) + + if 'experienceRequirements' in item: + e_Dict = { + "experienceRequirements": { + "@type": "OccupationalExperienceRequirements", + "monthsOfExperience": item['experienceRequirements'] # опыт работы в месяцах + }, + } + data.update(e_Dict) + + # Со значением "истина" это свойство будет указывать на то, что кандидатам достаточно иметь опыт, если у них нет требуемого образования + if 'required_only_experience' in item: + if 'experienceRequirements' in item and 'educationRequirements' in item: + data.update({ + 'experienceInPlaceOfEducation': item['required_only_experience'] + }) + + vacancies_list.append(data) + + return vacancies_list + diff --git a/BaseModels/search_optimization/ya_YML/ya_YML.py b/BaseModels/search_optimization/ya_YML/ya_YML.py new file mode 100644 index 0000000..9d40705 --- /dev/null +++ b/BaseModels/search_optimization/ya_YML/ya_YML.py @@ -0,0 +1 @@ +# https://yandex.ru/dev/turbo-shop/doc/quick-start/markets.html \ No newline at end of file diff --git a/BaseModels/search_optimization/ya_tips b/BaseModels/search_optimization/ya_tips new file mode 100644 index 0000000..0b89985 --- /dev/null +++ b/BaseModels/search_optimization/ya_tips @@ -0,0 +1 @@ +https://yandex.ru/support/webmaster/index.html \ No newline at end of file diff --git a/BaseModels/seo_text_generators.py b/BaseModels/seo_text_generators.py new file mode 
100644 index 0000000..c5e36eb --- /dev/null +++ b/BaseModels/seo_text_generators.py @@ -0,0 +1,258 @@ +from BaseModels.inter import cut_to_number_w_point + + +def generate_seotext_by_properties(product_data_Dict): + + power_txt = '' + ip_txt = '' + lm_txt = '' + temp_txt = '' + install_txt = '' + diametr_txt = '' + + try: + + if 'diameter' in product_data_Dict: + val = int(product_data_Dict['diameter']) + else: + val = int(product_data_Dict['width']) + + diametr_txt = '{0} truEnergy {1} серии {2}.
'.format( + product_data_Dict['product_type']['name'].upper(), + product_data_Dict['article'], + product_data_Dict['product_series']['name'].upper() + ) + + # if product_data_Dict['product_type']['name'] == 'Светильник светодиодный': + # + # if val < 100: + # diametr_txt = '{0} truEnergy {1} серии {2} - это хорошее решение для дома.
'.format( + # product_data_Dict['product_type']['name'].upper(), + # product_data_Dict['article'], + # product_data_Dict['product_series']['name'].upper() + # ) + # + # elif val < 150: + # diametr_txt = '{0} truEnergy {1} серии {2} отлично подойдет для освещения вашей квартиры, дома или офиса.
'.format( + # product_data_Dict['product_type']['name'].upper(), + # product_data_Dict['article'], + # product_data_Dict['product_series']['name'].upper() + # ) + # + # else: + # diametr_txt = '{0} truEnergy {1} серии {2} - это энергоэффективное освещение для различных площадей и объектов.
'.format( + # product_data_Dict['product_type']['name'].upper(), + # product_data_Dict['article'], + # product_data_Dict['product_series']['name'].upper() + # ) + # # не светильник + # else: + # diametr_txt = '{0} truEnergy {1} серии {2} - это энергоэффективное решение для освещения различных пространств.
'.format( + # product_data_Dict['product_type']['name'].upper(), + # product_data_Dict['article'], + # product_data_Dict['product_series']['name'].upper() + # ) + except Exception as e: + pass + + # --------- + for property in product_data_Dict['properties_w_values_filtred']: + + # ------ + + try: + + if property['property']['name'] == 'Мощность': + power = int(property['property_value']) + + if power < 7: + power_txt = 'Обладая низким энергопотреблением, этот {0} является заменой лампочки накаливания мощностью до 40 Ватт.
'.format( + product_data_Dict['product_type']['name'].lower(), + ) + + elif power < 13: + power_txt = 'Энергоэффективность этого устройства позволяет использовть его в местах, ' \ + 'где ранее использовались светильники с лампами накаливания мощностью до 75 Ватт.
'.format( + ) + elif power < 19: + power_txt = 'Этот {0} мощностью {1} Ватт легко заменит старые лампы накаливания мощностью до 100 Ватт ' \ + 'или люминесцентные лампы мощностью до 40 Ватт.
'.format( + product_data_Dict['product_type']['name'].lower(), + str(power) + ) + + elif power < 37: + power_txt = 'Данная модель подходит для освещения больших пространств. ' \ + 'Она не только поможет решить вопрос освещения, но и существенно сэкономит бюджет, ' \ + 'выделенный на решение этой задачи.
'.format( + product_data_Dict['product_type']['name'].lower(), + ) + else: + power_txt = '{0} Ватт, в данной модели обеспечивает мощный световой поток. ' \ + 'Это дает возможность установки одного изделия для освещения помещений с большой ' \ + 'площадью или открытых пространств.
'.format( + str(power), + product_data_Dict['product_type']['name'].lower(), + ) + + except Exception as e: + pass + + # ------ + + try: + + if property['property']['name'] == 'Световой поток' and product_data_Dict['article'] != '11043': + val = int(property['property_value']) + + if product_data_Dict['product_type']['name'] == 'Светильник светодиодный': + lm_txt = 'Один {0} данной модели способен осветить до {1} м.кв. площади ' \ + 'для рабочих зон и жилых комнат, и до {2} м.кв. площади для проходных и подсобных помещений ' \ + '(при стандартной высоте потолка и нормальной освещенности помещения).
'.format( + product_data_Dict['product_type']['name'].lower(), + str(round(val / 300,2)), + str(round(val / 120, 2)), + ) + + except Exception as e: + pass + + # ------- + + try: + + if property['property']['name'] == 'IP (пылевлагозащита)': + val = int(property['property_value']) + + if val > 66: + ip_txt = 'Максимальная защита IP{0} способна выдержать самые сильные испытания водой. ' \ + 'Освещение с такой защитой используют для фонтанов и бассейнов.
'.format( + str(val), + ) + + elif val > 64: + ip_txt = 'Данный продукт имеет высокую степень пылевлагозащиты - IP{0}. В связи с этим данная модель прекрасно подходит как ' \ + 'для отапливаемых помещений с нормальным уровнем влажности, так и для помещений неотапливаемых, ' \ + 'а также для эксплуатации на улице. Устройство с данной степенью защиты не боится пыли и влаги' \ + 'а так же имеет защиту от струй воды со всех направлений.
'.format( + str(val), + ) + + elif val > 60: + ip_txt = 'Степень защиты IP{0} обозначает полную защиту от брызг с любых сторон и имеет полную пылинепроницаемость ' \ + '(никакая пыль не может проникнуть внутрь корпуса устройства). ' \ + 'Светильники подходят для установки в помещении и на улице, при рабочих температурах -20 до +40 градусов.
'.format( + str(val), + ) + + elif val > 53: + ip_txt = 'У изделия с степенью защиты IP{0} снижена возможность попадания пыли внутрь корпуса ' \ + 'и обеспечена полная защита расположенной внутри устройстав электроники.' \ + 'Часто используют для рабочих помещений с повышенным содержанием пыли и влаги, а также под навесами.
'.format( + str(val), + product_data_Dict['product_type']['name'].lower(), + product_data_Dict['product_type']['name_plural'].lower(), + ) + + elif val > 40: + ip_txt = 'Могут устанавливаться в помещения с повышенным уровнем пыли.'.format( + product_data_Dict['product_type']['name'].lower(), + ) + else: + ip_txt = 'IP{0} - степень защиты данной модели, в связи с этим могут устанавливаться в' \ + ' отапливаемые помещения с умеренным уровнем влажности.
'.format( + str(val), + ) + + except Exception as e: + pass + + # ------- + + try: + + if property['property']['name'] == 'Цветовая температура': + val = int(property['property_value']) + + if val < 3001: + temp_txt = 'Теплый свет, генерируемый этой моделью способствует отдыху и расслаблению. ' \ + 'Он приятен для глаз. В связи с этим рекомендуется устанавливать {0} ' \ + 'с температурой {1}К в зоны отдыха, жилые комнаты и спальни, кафе, лаундж зоны. ' \ + 'Очень удачное решение для обеденных и гостинных комнат.
'.format( + product_data_Dict['product_type']['name_plural'].lower(), + str(val), + ) + + elif val < 4601: + temp_txt = 'Модель обладает нейтральным цветом свечения, который прекрасно подходит и как для жилых помещений и комнат, ' \ + 'так и для рабочих зон (офисов, кабинетов, производств) . ' \ + 'Данный свет стимулирует к работе не вызывая перенапряжения глаз и не искажая цветопередачу. ' \ + 'Универсальное и наиболее распространенное решение.
'.format( + str(val), + ) + + elif val < 7001: + temp_txt = 'Цветовая температура {0}К - наиболее оптимально использование в помещениях промышленного назначения, ' \ + 'административных зданиях, на производствах, складах, гаражах, паркингах. ' \ + 'Однако могут применяться и в интерьере для создания акцентов в дизайне, ' \ + 'либо если предпочтения потребителя отданы в пользу белого света.
'.format( + str(val), + ) + + + + else: + temp_txt = 'От показателя цветовой температуры зависит то, как Вы будут воспринимать предметы и другие объекты освещенные устройством. ' \ + 'С помощью цветовой температуры можно сделать более приятным отдых и улучшить эффективность работы. ' \ + 'Отниситесь внимательно к выбору устройства по этому параметру.
'.format( + str(val), + ) + except Exception as e: + pass + + # ------- + + try: + + if property['property']['name'] == 'Тип монтажа': + val = property['property_value'] + + if val == 'встраиваемый': + install_txt = 'Устройство устанавливается в предварительно вырезанное в поверхности отверстие. ' \ + 'Этот вариант монтажа используется для подвесных и натяжных потолков, а так же для фальш-стен и ниш.'.format( + str(val), + ) + + elif val == 'накладной': + install_txt = 'Способ крепления - накладной. Значит эта модель может быть закреплена на любую ровную поверхность.'.format( + str(val), + ) + + elif val == 'встраиваемый/накладной': + install_txt = '{0} обладает возможностью монтажа как в отверстия на поверхности плоскостей, так и на любую ровную поверхность.'.format( + product_data_Dict['article'], + ) + + else: + pass + + if 'height_visible_part' in product_data_Dict and product_data_Dict['height_visible_part']: + install_txt = install_txt + ' Высота видимой части устройства после монтажа составит {0}мм.
'.format( + str(round(product_data_Dict['height_visible_part'])) + ) + else: + install_txt = install_txt + '
' + + except Exception as e: + pass + + product_data_Dict['seo_text'] = '{0}{1}{2}{3}{4}{5}'.format( + diametr_txt, + power_txt, + lm_txt, + ip_txt, + temp_txt, + install_txt + ) + + return product_data_Dict \ No newline at end of file diff --git a/BaseModels/templates/m_show_message.html b/BaseModels/templates/m_show_message.html new file mode 100644 index 0000000..cb9ba16 --- /dev/null +++ b/BaseModels/templates/m_show_message.html @@ -0,0 +1,15 @@ + diff --git a/BaseModels/templatetags/__init__.py b/BaseModels/templatetags/__init__.py new file mode 100644 index 0000000..14c7ff2 --- /dev/null +++ b/BaseModels/templatetags/__init__.py @@ -0,0 +1 @@ +__author__ = 'SDE' diff --git a/BaseModels/templatetags/base_tags_extra.py b/BaseModels/templatetags/base_tags_extra.py new file mode 100644 index 0000000..7ff4027 --- /dev/null +++ b/BaseModels/templatetags/base_tags_extra.py @@ -0,0 +1,155 @@ +__author__ = 'SDE' + +from django import template +from django.template.defaultfilters import stringfilter + +register = template.Library() + +from django.core.serializers import serialize +from django.db.models.query import QuerySet +import simplejson +from django.template import Library +from django.utils.html import mark_safe + +@register.filter('get_value_from_dict') +def get_value_from_dict(dict_data, key): + """ + usage example {{ your_dict|get_value_from_dict:your_key }} + """ + + if key in dict_data: + res = dict_data[key] + return res + + return False + + + +@register.filter() +def get_rows_count_by_cols_count(data, cols_count): + rows_count = len(data) // cols_count + if len(data) % cols_count: + rows_count += 1 + return rows_count + +@register.filter() +def get_numbers_list(from_el, to_el): + res = range(from_el, to_el+1) + return res + + +def val_type(value): + res = type(value) + return res.__name__ +register.filter('val_type', val_type) + +@register.filter() +def get_cols_table_data_for_row_when_cols3(value, row): + el_count = 3 + from_el = (row-1) * el_count + 
to_el = row * el_count + part = list(value)[from_el:to_el] + return part +# register.filter('val_type', val_type) + + +@register.filter +@stringfilter +def correct_for_tables(value): + if value in ['None', '0.0']: + return '-' + return value + + +@register.filter +@stringfilter +def del_bad_symbols(value): + from BaseModels.functions import del_bad_symbols + return del_bad_symbols(value) + + +@register.filter +@stringfilter +def del_amp_symbols(value): + from BaseModels.functions import del_nbsp + return del_nbsp(value) + +@register.filter +@stringfilter +def del_lang_from_path(value): + path_list = value.split('/') + path = u'' + for i in path_list[1:]: + path.join(i + '/') + return path + +@register.filter +@stringfilter +def get_color_by_number(value, arg=None): + + color = None + try: + val = float(value) + + if not color and arg == u'%': + + color = u'black' + if val > 50: + color = u'green' + elif val <= 50 and val >= 25: + color = u'#6c8107' + elif val <= 25 and val >= 10: + color = u'#a89803' + elif val <= 10 and val >= 5: + color = u'#e6a707' + elif val <= 5 and val >= 0: + color = u'#e67307' + elif val <= 0: + color = u'red' + + + # val_range = val_max - val_min + # # val_percent = (val_range * 100 / val) - 100 + # offset = -(val_min + -(val)) + # if val <0: + # val = offset + # if val > val_max: + # val = val_max + # elif val < 0: + # val = 0 + # + # color_range = 16711680 - 1211136 + # val_1unit = float(color_range) / float(val_range) + # dec_color = 16711680 - int(val_1unit * val) + + if not color: + color = u'black' + if val > 1000: + color = u'green' + elif val <= 1000 and val >= 500: + color = u'#6c8107' + elif val <= 500 and val >= 250: + color = u'#a89803' + elif val <= 250 and val >= 125: + color = u'#e6a707' + elif val <= 125 and val >= 50: + color = u'#e67307' + elif val <= 50: + color = u'red' + + # s = u'style="color: #{0}12;"'.format(str(hex(dec_color))[2:6]) + s = u'style="color: {0};"'.format(color) + return s + except: + return u'' + + 
+@register.filter +@stringfilter +def check_aprox_compare_strings(search_phrase, txt): + from ProductApp.search import get_highlight_string + + s = get_highlight_string(search_phrase, txt) + + return s + diff --git a/FirePlayProj/__init__.py b/FirePlayProj/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/FirePlayProj/asgi.py b/FirePlayProj/asgi.py new file mode 100644 index 0000000..95c0119 --- /dev/null +++ b/FirePlayProj/asgi.py @@ -0,0 +1,16 @@ +""" +ASGI config for FirePlayProj project. + +It exposes the ASGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/4.2/howto/deployment/asgi/ +""" + +import os + +from django.core.asgi import get_asgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'FirePlayProj.settings') + +application = get_asgi_application() diff --git a/FirePlayProj/settings.py b/FirePlayProj/settings.py new file mode 100644 index 0000000..a853698 --- /dev/null +++ b/FirePlayProj/settings.py @@ -0,0 +1,252 @@ +""" +Django settings for FirePlayProj project. + +Generated by 'django-admin startproject' using Django 4.2.1. + +For more information on this file, see +https://docs.djangoproject.com/en/4.2/topics/settings/ + +For the full list of settings and their values, see +https://docs.djangoproject.com/en/4.2/ref/settings/ +""" + +from pathlib import Path + +# Build paths inside the project like this: BASE_DIR / 'subdir'. +BASE_DIR = Path(__file__).resolve().parent.parent + + +# Quick-start development settings - unsuitable for production +# See https://docs.djangoproject.com/en/4.2/howto/deployment/checklist/ + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = 'django-insecure-k=2q3&t1pufsxpu#)0hfd(#!9%horaq$krbbxm=7*w$0x5(h1b' + +# SECURITY WARNING: don't run with debug turned on in production! 
+DEBUG = True + +ALLOWED_HOSTS = [] + + +# Application definition + +INSTALLED_APPS = [ + 'django.contrib.admin', + 'django.contrib.auth', + 'django.contrib.contenttypes', + 'django.contrib.sessions', + 'django.contrib.messages', + 'django.contrib.staticfiles', + + 'ckeditor', + 'ckeditor_uploader', + + 'AuthApp', + 'QuestionsApp', + 'GameApp', +] + +MIDDLEWARE = [ + 'django.middleware.security.SecurityMiddleware', + 'django.contrib.sessions.middleware.SessionMiddleware', + 'django.middleware.common.CommonMiddleware', + 'django.middleware.csrf.CsrfViewMiddleware', + 'django.contrib.auth.middleware.AuthenticationMiddleware', + 'django.contrib.messages.middleware.MessageMiddleware', + 'django.middleware.clickjacking.XFrameOptionsMiddleware', +] + +ROOT_URLCONF = 'FirePlayProj.urls' + +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [BASE_DIR / 'templates'] + , + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.template.context_processors.debug', + 'django.template.context_processors.request', + 'django.contrib.auth.context_processors.auth', + 'django.contrib.messages.context_processors.messages', + ], + }, + }, +] + +WSGI_APPLICATION = 'FirePlayProj.wsgi.application' + + +# Database +# https://docs.djangoproject.com/en/4.2/ref/settings/#databases + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'NAME': 'fireGameDB', + 'USER': 'test_user', + 'PASSWORD': 'test_db_pass', + 'HOST': '127.0.0.1', + 'PORT': '5432', + } +} + + +# Password validation +# https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators + +AUTH_PASSWORD_VALIDATORS = [ + { + 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + }, + { + 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + }, + { + 'NAME': 
'django.contrib.auth.password_validation.NumericPasswordValidator', + }, +] + + +# Internationalization +# https://docs.djangoproject.com/en/4.2/topics/i18n/ + +LANGUAGE_CODE = 'ru-RU' + +TIME_ZONE = 'Europe/Minsk' + +USE_I18N = True + +USE_TZ = True + + +# Static files (CSS, JavaScript, Images) +# https://docs.djangoproject.com/en/4.2/howto/static-files/ + +MEDIA_URL = '/media/' +MEDIA_ROOT = 'media/' + +STATIC_URL = '/static/' +STATIC_ROOT = '/' + +# Default primary key field type +# https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field + +DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' + + +gettext = lambda s: s +LANGUAGES = ( + (u'ru', gettext(u'Russian')), + (u'en', gettext(u'English')), + # (u'de', gettext(u'Deutsch')), + # (u'fr', gettext(u'Francais')), +) +MODELTRANSLATION_LANGUAGES = ('ru', 'en') +MODELTRANSLATION_ENABLE_FALLBACKS = True +MODELTRANSLATION_FALLBACK_LANGUAGES = { + 'default': ('ru','en'), + 'ru': ('ru','en'), + 'en': ('en', 'ru'), +} + +# Add custom languages not provided by Django +import django.conf.locale +LANG_INFO = dict(django.conf.locale.LANG_INFO.items()) +django.conf.locale.LANG_INFO = LANG_INFO + +CKEDITOR_BASEPATH = "/static/ckeditor/ckeditor/" +CKEDITOR_UPLOAD_PATH = "uploads/" + +CKEDITOR_IMAGE_BACKEND = "pillow" + +# CKEDITOR_BROWSE_SHOW_DIRS = True + +CKEDITOR_JQUERY_URL = 'https://ajax.googleapis.com/ajax/libs/jquery/2.2.4/jquery.min.js' + +CKEDITOR_OPTIONS = { + 'height': 291, + 'width': '95%', + 'filebrowserWindowHeight': 600, + 'filebrowserWindowWidth': "100%", + 'toolbar': 'YourCustomToolbarConfig', + + 'allowedContent': True, + + 'enterMode': 2, + 'basicEntities' : False, + 'entities_additional': '', + 'entities' : False, + 'htmlEncodeOutput' : False, + 'toolbar_Basic': [ + ['Source', '-', 'Bold', 'Italic'] + ], + 'toolbar_YourCustomToolbarConfig': [ + {'name': 'document', 'items': ['Source', '-', 'Save', 'NewPage', 'Preview', 'Print', '-', 'Templates']}, + {'name': 'clipboard', 'items': ['Cut', 
'Copy', 'Paste', 'PasteText', 'PasteFromWord', '-', 'Undo', 'Redo']}, + {'name': 'editing', 'items': ['Find', 'Replace', '-', 'SelectAll']}, + {'name': 'forms', + 'items': ['Form', 'Checkbox', 'Radio', 'TextField', 'Textarea', 'Select', 'Button', 'ImageButton', + 'HiddenField']}, + '/', + {'name': 'basicstyles', + 'items': ['Bold', 'Italic', 'Underline', 'Strike', 'Subscript', 'Superscript', '-', 'RemoveFormat']}, + {'name': 'paragraph', + 'items': ['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'Blockquote', 'CreateDiv', '-', + 'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock', '-', 'BidiLtr', 'BidiRtl', + 'Language']}, + {'name': 'links', 'items': ['Link', 'Unlink', 'Anchor']}, + {'name': 'insert', + 'items': ['Image', 'Flash', 'Table', 'HorizontalRule', 'Smiley', 'SpecialChar', 'PageBreak', 'Iframe']}, + '/', + {'name': 'styles', 'items': ['FontSize']}, + {'name': 'colors', 'items': ['TextColor', 'BGColor']}, + {'name': 'tools', 'items': ['Maximize', 'ShowBlocks']}, + # {'name': 'about', 'items': ['About']}, + '/', # put this to force next toolbar on new line + # {'name': 'yourcustomtools', 'items': [ + # # put the name of your editor.ui.addButton here + # 'Preview', + # 'Maximize', + # + # ]}, + ], + + 'tabSpaces': 4, + 'removePlugins': 'stylesheetparser', + # 'extraPlugins': ','.join([ + # 'uploadimage', # the upload image feature + # # your extra plugins here + # 'div', + # 'autolink', + # 'autoembed', + # 'embedsemantic', + # 'autogrow', + # # 'devtools', + # 'widget', + # 'lineutils', + # 'clipboard', + # 'dialog', + # 'dialogui', + # 'elementspath' + # ]), + +} + + +try: + from tEDataProj import db_local_sets + DATABASES = db_local_sets.DATABASES +except ImportError as e: + pass + +# global prod_server +try: + from settings_local import * + prod_server = True +except ImportError: + prod_server = False \ No newline at end of file diff --git a/FirePlayProj/urls.py b/FirePlayProj/urls.py new file mode 100644 index 0000000..8566b3c 
--- /dev/null +++ b/FirePlayProj/urls.py @@ -0,0 +1,27 @@ +""" +URL configuration for FirePlayProj project. + +The `urlpatterns` list routes URLs to views. For more information please see: + https://docs.djangoproject.com/en/4.2/topics/http/urls/ +Examples: +Function views + 1. Add an import: from my_app import views + 2. Add a URL to urlpatterns: path('', views.home, name='home') +Class-based views + 1. Add an import: from other_app.views import Home + 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') +Including another URLconf + 1. Import the include() function: from django.urls import include, path + 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) +""" + +from django.contrib import admin +from django.urls import path, include +from django.conf.urls.static import static +from django.conf import settings + +urlpatterns = [ + path('admin/', admin.site.urls), + path('ckeditor/', include('ckeditor_uploader.urls')), +] +urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) \ No newline at end of file diff --git a/FirePlayProj/wsgi.py b/FirePlayProj/wsgi.py new file mode 100644 index 0000000..4d7800e --- /dev/null +++ b/FirePlayProj/wsgi.py @@ -0,0 +1,16 @@ +""" +WSGI config for FirePlayProj project. + +It exposes the WSGI callable as a module-level variable named ``application``. 
+ +For more information on this file, see +https://docs.djangoproject.com/en/4.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'FirePlayProj.settings') + +application = get_wsgi_application() diff --git a/GameApp/__init__.py b/GameApp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/GameApp/admin.py b/GameApp/admin.py new file mode 100644 index 0000000..c4467cf --- /dev/null +++ b/GameApp/admin.py @@ -0,0 +1,16 @@ +from django.contrib import admin +from BaseModels.admin_utils import Admin_BaseIconModel +from .models import * + +class Admin_Game(Admin_BaseIconModel): + pass +admin.site.register(Game, Admin_Game) + +class Admin_UserInGame(Admin_BaseIconModel): + pass +admin.site.register(UserInGame, Admin_UserInGame) + + +class Admin_QuestionInGameForUser(Admin_BaseIconModel): + pass +admin.site.register(QuestionInGameForUser, Admin_QuestionInGameForUser) \ No newline at end of file diff --git a/GameApp/apps.py b/GameApp/apps.py new file mode 100644 index 0000000..14baafe --- /dev/null +++ b/GameApp/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class GameappConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'GameApp' diff --git a/GameApp/migrations/0001_initial.py b/GameApp/migrations/0001_initial.py new file mode 100644 index 0000000..9499d5b --- /dev/null +++ b/GameApp/migrations/0001_initial.py @@ -0,0 +1,84 @@ +# Generated by Django 4.2.1 on 2023-05-16 14:01 + +from django.conf import settings +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('QuestionsApp', '0001_initial'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='Game', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, 
serialize=False, verbose_name='ID')), + ('name', models.TextField(blank=True, help_text='Название', null=True, verbose_name='Название')), + ('name_plural', models.TextField(blank=True, null=True, verbose_name='Название (множественное число)')), + ('order', models.IntegerField(blank=True, null=True, verbose_name='Очередность отображения')), + ('createDT', models.DateTimeField(auto_now_add=True, verbose_name='Дата и время создания')), + ('modifiedDT', models.DateTimeField(blank=True, null=True, verbose_name='Дата и время последнего изменения')), + ('enable', models.BooleanField(db_index=True, default=True, verbose_name='Включено')), + ('json_data', models.JSONField(blank=True, default=dict, verbose_name='Дополнительные данные')), + ('level', models.IntegerField(default=1, verbose_name='Уровень игры')), + ('status', models.CharField(default='waiting_users', max_length=100, verbose_name='Статус')), + ('time_for_waiting_users', models.IntegerField(default=15, verbose_name='Время ожидания пользователей (сек)')), + ('cur_lap', models.IntegerField(default=0, verbose_name='Текущий круг')), + ('comment', models.TextField(blank=True, null=True, verbose_name='Комментарий')), + ('winner', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='rel_games_for_user', to=settings.AUTH_USER_MODEL, verbose_name='id пользователя')), + ], + options={ + 'verbose_name': 'Игра', + 'verbose_name_plural': 'Игры', + }, + ), + migrations.CreateModel( + name='UserInGame', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.TextField(blank=True, help_text='Название', null=True, verbose_name='Название')), + ('name_plural', models.TextField(blank=True, null=True, verbose_name='Название (множественное число)')), + ('order', models.IntegerField(blank=True, null=True, verbose_name='Очередность отображения')), + ('createDT', models.DateTimeField(auto_now_add=True, 
verbose_name='Дата и время создания')), + ('modifiedDT', models.DateTimeField(blank=True, null=True, verbose_name='Дата и время последнего изменения')), + ('enable', models.BooleanField(db_index=True, default=True, verbose_name='Включено')), + ('json_data', models.JSONField(blank=True, default=dict, verbose_name='Дополнительные данные')), + ('last_lap', models.IntegerField(default=0, verbose_name='Последний круг')), + ('status', models.CharField(default='waiting_users', max_length=100, verbose_name='Статус')), + ('game', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='rel_userInGame_for_game', to='GameApp.game', verbose_name='Игра')), + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='rel_userInGame_for_user', to=settings.AUTH_USER_MODEL, verbose_name='Игрок')), + ], + options={ + 'verbose_name': 'Игрок', + 'verbose_name_plural': 'Игроки', + }, + ), + migrations.CreateModel( + name='QuestionInGameForUser', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.TextField(blank=True, help_text='Название', null=True, verbose_name='Название')), + ('name_plural', models.TextField(blank=True, null=True, verbose_name='Название (множественное число)')), + ('order', models.IntegerField(blank=True, null=True, verbose_name='Очередность отображения')), + ('createDT', models.DateTimeField(auto_now_add=True, verbose_name='Дата и время создания')), + ('modifiedDT', models.DateTimeField(blank=True, null=True, verbose_name='Дата и время последнего изменения')), + ('enable', models.BooleanField(db_index=True, default=True, verbose_name='Включено')), + ('json_data', models.JSONField(blank=True, default=dict, verbose_name='Дополнительные данные')), + ('status', models.CharField(default='wait', max_length=100, verbose_name='Статус')), + ('answer_right', models.BooleanField(default=False, verbose_name='Верный ответ')), + 
('use_time_for_answer', models.IntegerField(default=0, verbose_name='Потрачено времени на ответ (сек)')), + ('question', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='rel_questionInGame_for_question', to='QuestionsApp.question', verbose_name='Вопрос')), + ('user_in_game', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='rel_questionInGame_for_userInGame', to='GameApp.useringame', verbose_name='Игра')), + ], + options={ + 'verbose_name': 'Игрок', + 'verbose_name_plural': 'Игроки', + }, + ), + ] diff --git a/GameApp/migrations/0002_alter_questioningameforuser_options_and_more.py b/GameApp/migrations/0002_alter_questioningameforuser_options_and_more.py new file mode 100644 index 0000000..9d7645c --- /dev/null +++ b/GameApp/migrations/0002_alter_questioningameforuser_options_and_more.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.1 on 2023-05-16 14:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('GameApp', '0001_initial'), + ] + + operations = [ + migrations.AlterModelOptions( + name='questioningameforuser', + options={'verbose_name': 'Вопрос в игре', 'verbose_name_plural': 'Вопросы в игре'}, + ), + migrations.AlterModelOptions( + name='useringame', + options={'verbose_name': 'Пользователь в игре', 'verbose_name_plural': 'Пользователи в игре'}, + ), + ] diff --git a/GameApp/migrations/__init__.py b/GameApp/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/GameApp/models.py b/GameApp/models.py new file mode 100644 index 0000000..4191233 --- /dev/null +++ b/GameApp/models.py @@ -0,0 +1,94 @@ +from django.db import models +from BaseModels.base_models import BaseModel +from django.utils.translation import gettext_lazy as _ +from AuthApp.models import User + +game_status_choices = ( + ('waiting_users', 'Ожидание пользователей'), + ('game', 'В процессе'), + ('finished', 'Завершена') +) + + +class Game(BaseModel): + level = 
models.IntegerField(verbose_name=_('Уровень игры'), default=1) + status = models.CharField(max_length=100, verbose_name=_('Статус'), default='waiting_users') + time_for_waiting_users = models.IntegerField(verbose_name=_('Время ожидания пользователей (сек)'), default=15) + cur_lap = models.IntegerField(verbose_name=_('Текущий круг'), default=0) + + winner = models.OneToOneField(User, verbose_name=_(u'id пользователя'), related_name='rel_games_for_user', + null=True, blank=True, on_delete=models.CASCADE) + + comment = models.TextField(verbose_name=_('Комментарий'), null=True, blank=True) + + def __str__(self): + if self.name: + return f'{self.name}' + else: + return str(self.id) + + + class Meta: + + verbose_name = _('Игра') + verbose_name_plural = _('Игры') + # ordering = ('user__last_name', 'user__first_name') + + +user_in_game_status_choices = ( + ('waiting_users', 'Ожидание пользователей'), + ('in_game', 'В игре'), + ('lose', 'Выбыл'), + ('finish', 'Завершил игру') +) + +class UserInGame(BaseModel): + game = models.OneToOneField( + Game, verbose_name=_('Игра'), related_name='rel_userInGame_for_game', on_delete=models.CASCADE) + user = models.OneToOneField( + User, verbose_name=_('Игрок'), related_name='rel_userInGame_for_user', on_delete=models.CASCADE) + + last_lap = models.IntegerField(verbose_name=_('Последний круг'), default=0) + + status = models.CharField(max_length=100, verbose_name=_('Статус'), default='waiting_users') + + def __str__(self): + if self.name: + return f'{self.name}' + else: + return str(self.id) + + + class Meta: + verbose_name = _('Пользователь в игре') + verbose_name_plural = _('Пользователи в игре') + # ordering = ('question') + + +question_in_game_status_choices = ( + ('wait', 'Ожидание ответа'), + ('answered', 'Ответил'), +) + +class QuestionInGameForUser(BaseModel): + from QuestionsApp.models import Question + user_in_game = models.OneToOneField( + UserInGame, verbose_name=_('Игра'), related_name='rel_questionInGame_for_userInGame', 
on_delete=models.CASCADE) + question = models.OneToOneField( + Question, verbose_name=_('Вопрос'), related_name='rel_questionInGame_for_question', on_delete=models.CASCADE) + + status = models.CharField(max_length=100, verbose_name=_('Статус'), default='wait') + answer_right = models.BooleanField(verbose_name=_('Верный ответ'), default=False) + use_time_for_answer = models.IntegerField(verbose_name=_('Потрачено времени на ответ (сек)'), default=0) + + def __str__(self): + if self.name: + return f'{self.name}' + else: + return str(self.id) + + + class Meta: + verbose_name = _('Вопрос в игре') + verbose_name_plural = _('Вопросы в игре') + # ordering = ('question') \ No newline at end of file diff --git a/GameApp/tests.py b/GameApp/tests.py new file mode 100644 index 0000000..7ce503c --- /dev/null +++ b/GameApp/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/GameApp/views.py b/GameApp/views.py new file mode 100644 index 0000000..91ea44a --- /dev/null +++ b/GameApp/views.py @@ -0,0 +1,3 @@ +from django.shortcuts import render + +# Create your views here. 
diff --git a/QuestionsApp/__init__.py b/QuestionsApp/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/QuestionsApp/admin.py b/QuestionsApp/admin.py new file mode 100644 index 0000000..1f124ec --- /dev/null +++ b/QuestionsApp/admin.py @@ -0,0 +1,11 @@ +from django.contrib import admin +from BaseModels.admin_utils import Admin_BaseIconModel +from .models import * + +class Admin_Question(Admin_BaseIconModel): + pass +admin.site.register(Question, Admin_Question) + +class Admin_Answer(Admin_BaseIconModel): + pass +admin.site.register(Answer, Admin_Answer) \ No newline at end of file diff --git a/QuestionsApp/apps.py b/QuestionsApp/apps.py new file mode 100644 index 0000000..56c7dfd --- /dev/null +++ b/QuestionsApp/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class QuestionsappConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'QuestionsApp' diff --git a/QuestionsApp/migrations/0001_initial.py b/QuestionsApp/migrations/0001_initial.py new file mode 100644 index 0000000..dfaa6f8 --- /dev/null +++ b/QuestionsApp/migrations/0001_initial.py @@ -0,0 +1,58 @@ +# Generated by Django 4.2.1 on 2023-05-16 14:01 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='Question', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.TextField(blank=True, help_text='Название', null=True, verbose_name='Название')), + ('name_plural', models.TextField(blank=True, null=True, verbose_name='Название (множественное число)')), + ('order', models.IntegerField(blank=True, null=True, verbose_name='Очередность отображения')), + ('createDT', models.DateTimeField(auto_now_add=True, verbose_name='Дата и время создания')), + ('modifiedDT', models.DateTimeField(blank=True, null=True, 
verbose_name='Дата и время последнего изменения')), + ('enable', models.BooleanField(db_index=True, default=True, verbose_name='Включено')), + ('json_data', models.JSONField(blank=True, default=dict, verbose_name='Дополнительные данные')), + ('game_level', models.IntegerField(default=1, verbose_name='Уровень игры')), + ('time_for_answer', models.IntegerField(default=7, verbose_name='Время на ответ (сек)')), + ('used_count', models.IntegerField(default=0, verbose_name='Количество использования')), + ('comment', models.TextField(blank=True, null=True, verbose_name='Комментарий')), + ], + options={ + 'verbose_name': 'Вопрос', + 'verbose_name_plural': 'Вопросы', + 'permissions': (), + }, + ), + migrations.CreateModel( + name='Answer', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.TextField(blank=True, help_text='Название', null=True, verbose_name='Название')), + ('name_plural', models.TextField(blank=True, null=True, verbose_name='Название (множественное число)')), + ('order', models.IntegerField(blank=True, null=True, verbose_name='Очередность отображения')), + ('createDT', models.DateTimeField(auto_now_add=True, verbose_name='Дата и время создания')), + ('modifiedDT', models.DateTimeField(blank=True, null=True, verbose_name='Дата и время последнего изменения')), + ('enable', models.BooleanField(db_index=True, default=True, verbose_name='Включено')), + ('json_data', models.JSONField(blank=True, default=dict, verbose_name='Дополнительные данные')), + ('right_answer', models.BooleanField(default=False, verbose_name='Правильный')), + ('comment', models.TextField(blank=True, null=True, verbose_name='Комментарий')), + ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='rel_answers_for_question', to='QuestionsApp.question', verbose_name='Вопрос')), + ], + options={ + 'verbose_name': 'Ответ', + 'verbose_name_plural': 'Ответы', + 'ordering': 
from django.db import models
from BaseModels.base_models import BaseModel
from django.utils.translation import gettext_lazy as _


class Question(BaseModel):
    """A quiz question; common fields (name, order, ...) come from BaseModel."""

    game_level = models.IntegerField(verbose_name=_('Уровень игры'), default=1)
    time_for_answer = models.IntegerField(verbose_name=_('Время на ответ (сек)'), default=7)
    used_count = models.IntegerField(verbose_name=_('Количество использования'), default=0)
    comment = models.TextField(verbose_name=_('Комментарий'), null=True, blank=True)

    def __str__(self):
        # Show the question text when present, otherwise the primary key.
        return str(self.name) if self.name else str(self.id)

    class Meta:
        permissions = ()

        verbose_name = _('Вопрос')
        verbose_name_plural = _('Вопросы')
        # ordering = ('user__last_name', 'user__first_name')


class Answer(BaseModel):
    """One answer option of a Question; right_answer marks the correct one."""

    question = models.ForeignKey(
        Question, verbose_name=_('Вопрос'), related_name='rel_answers_for_question', on_delete=models.CASCADE)
    right_answer = models.BooleanField(verbose_name=_('Правильный'), default=False)
    comment = models.TextField(verbose_name=_('Комментарий'), null=True, blank=True)

    def __str__(self):
        return str(self.name) if self.name else str(self.id)

    class Meta:
        verbose_name = _('Ответ')
        verbose_name_plural = _('Ответы')
        ordering = ('question',)
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

# Required core settings.  BUGFIX: str() guards against MEDIA_ROOT being a
# pathlib.Path (the default in modern Django project templates): the old
# code called len() and .endswith() on it, which fails for Path objects.
MEDIA_URL = str(settings.MEDIA_URL)
MEDIA_ROOT = str(settings.MEDIA_ROOT)

# Widget-specific settings, each overridable from the project settings.
TEMP_DIR = getattr(settings, 'FILES_WIDGET_TEMP_DIR', 'temp/uploads/')
TEMP_DIR_FORMAT = getattr(settings, 'FILES_WIDGET_TEMP_DIR_FORMAT', '%4d-%02d-%02d-%02d-%02d')
FILES_DIR = getattr(settings, 'FILES_WIDGET_FILES_DIR', 'uploads/from_admin/')
OLD_VALUE_STR = getattr(settings, 'FILES_WIDGET_OLD_VALUE_STR', 'old_%s_value')
DELETED_VALUE_STR = getattr(settings, 'FILES_WIDGET_DELETED_VALUE_STR', 'deleted_%s_value')
MOVED_VALUE_STR = getattr(settings, 'FILES_WIDGET_MOVED_VALUE_STR', 'moved_%s_value')
JQUERY_PATH = getattr(settings, 'FILES_WIDGET_JQUERY_PATH', '//ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js')
JQUERY_UI_PATH = getattr(settings, 'FILES_WIDGET_JQUERY_UI_PATH', '//ajax.googleapis.com/ajax/libs/jqueryui/1.10.3/jquery-ui.min.js')
USE_FILEBROWSER = getattr(settings, 'FILES_WIDGET_USE_FILEBROWSER', False)
FILEBROWSER_JS_PATH = getattr(settings, 'FILES_WIDGET_FILEBROWSER_JS_PATH', 'filebrowser/js/AddFileBrowser.js')
ADD_IMAGE_BY_URL = getattr(settings, 'FILES_WIDGET_ADD_IMAGE_BY_URL', True)
MAX_FILESIZE = getattr(settings, 'FILES_WIDGET_MAX_FILESIZE', 0)
FILE_TYPES = getattr(settings, 'FILES_WIDGET_FILE_TYPES', None)
USE_TRASH = getattr(settings, 'FILES_WIDGET_USE_TRASH', False)
TRASH_DIR = getattr(settings, 'FILES_WIDGET_TRASH_DIR', 'uploads/trash/files_widget/')

# Fail fast on misconfiguration.  Truthiness instead of len(): idiomatic
# and does not blow up on values that have no length.
if not MEDIA_URL or not MEDIA_ROOT or not TEMP_DIR or not FILES_DIR:
    raise ImproperlyConfigured('MEDIA_URL, MEDIA_ROOT, FILES_WIDGET_TEMP_DIR and FILES_WIDGET_FILES_DIR must not be empty')
if TEMP_DIR == FILES_DIR:
    raise ImproperlyConfigured('FILES_WIDGET_TEMP_DIR and FILES_WIDGET_FILES_DIR must be different')

# The rest of the package joins relative paths onto MEDIA_ROOT; normalize
# the trailing slash once here.
if not MEDIA_ROOT.endswith('/'):
    MEDIA_ROOT += '/'
self._exists = None + self._size = None + self._accessed_time = None + self._created_time = None + self._modified_time = None + self._thumbnails = {} + self.settings = { + 'img_attrs': {}, + 'thumbnail_size': None, + 'thumbnail_attrs': {}, + } + self.settings.update(settings) + return self + + def _html_attrs(self, **kwargs): + attrs = {} + attrs.update(kwargs) + if 'css_class' in attrs: + attrs['class'] = attrs['css_class'] + del attrs['css_class'] + return attrs + + @property + def unescaped(self): + return urllib.parse.unquote(self) + + @property + def escaped(self): + return urllib.parse.quote(self.unescaped) + + @property + def url(self): + if not self.startswith('/') and self.find('//') == -1: + return os.path.join(MEDIA_URL, self.escaped) + return self.escaped + + @property + def local_path(self): + if not self.startswith('/') and self.find('//') == -1: + return os.path.join(MEDIA_ROOT, urllib.parse.unquote(self)) + return self + + def _get_local_path_or_file(self): + # if file is in static instead of media directory, sorl raises + # a suspicious operation error. So we open it safely without errors. 
+ + if self.startswith('/'): + if self.startswith('/static/'): + path = self.replace('/static/', '') + elif self.startswith(settings.STATIC_URL): + path = self.replace(settings.STATIC_URL, '') + else: + return self.local_path + else: + return self.local_path + + path = finders.find(urllib.parse.unquote(path)) + image = ImageFile(open(path, 'r')) + return image + + @property + def filename(self): + return urllib.parse.unquote(re.sub(r'^.+\/', '', self)) + + @property + def display_name(self): + without_extension = re.sub(r'\.[\w\d]+$', '', self.filename) + with_spaces = re.sub(r'_', ' ', without_extension) + return with_spaces + + @property + def ext(self): + return re.sub(r'^.+\.', '', self.filename) + + def exists(self): + if self._exists == None: + self._exists = os.path.exists(self.local_path) + return self._exists + + def get_size(self): + if self._size == None: + self._size = os.path.getsize(self.local_path) + return self._size + + def get_accessed_time(self): + if self._accessed_time == None: + self._accessed_time = datetime.fromtimestamp(os.path.getatime(self.local_path)) + return self._accessed_time + + def get_created_time(self): + if self._created_time == None: + self._created_time = datetime.fromtimestamp(os.path.getctime(self.local_path)) + return self._created_time + + def get_modified_time(self): + if self._modified_time == None: + self._modified_time = datetime.fromtimestamp(os.path.getmtime(self.local_path)) + return self._modified_time + + +class ImagePath(FilePath): + def img_tag(self, **kwargs): + attrs = {} + attrs.update(self.settings['img_attrs']) + attrs.update(kwargs) + attrs = self._html_attrs(**attrs) + attrs_str = ''.join([ + u'%s="%s" ' % (key, value) + for key, value in attrs.items() + ]) + return mark_safe(u'' % (self.url, attrs_str)) + + def _thumbnail_file_format(self): + if self.ext.lower() in ['gif', 'png']: + return 'PNG' + return 'JPEG' + + def thumbnail(self, size=None, **kwargs): + size = size or self.settings['thumbnail_size'] 
+ if not size: + raise Exception('No thumbnail size supplied') + + attrs = { + 'format': self._thumbnail_file_format(), + 'upscale': False, + } + attrs.update(self.settings['thumbnail_attrs']) + attrs.update(kwargs) + + all_attrs = { 'size': size } + all_attrs.update(attrs) + key = hash(frozenset(all_attrs)) + + if not key in self._thumbnails: + #self._thumbnails[key] = get_thumbnail(self._get_local_path_or_file(), size, **attrs) + self._thumbnails[key] = get_thumbnail(self.local_path, size, **attrs) + + return self._thumbnails[key] + + def thumbnail_tag(self, size, opts={}, **kwargs): + try: + thumbnail = self.thumbnail(size, **opts) + except EnvironmentError as e: + if settings.THUMBNAIL_DEBUG: + raise e + return '' + + src = ImagePath(thumbnail.url, self._instance, self._field) + attrs = { 'width': thumbnail.width, 'height': thumbnail.height } + attrs.update(self.settings['img_attrs']) + attrs.update(kwargs) + return src.img_tag(**attrs) + + def __getattr__(self, attr): + thumbnail_mxn = re.match(r'^thumbnail_(tag_)?(\d*x?\d+)$', attr) + if thumbnail_mxn: + tag = thumbnail_mxn.group(1) == 'tag_' + size = thumbnail_mxn.group(2) + if tag: + return curry(self.thumbnail_tag, size) + else: + return curry(self.thumbnail, size) + + raise AttributeError + + +class FilePaths(six.text_type): + item_class = FilePath + + def __new__(cls, str, instance=None, field=None, settings={}): + self = super(FilePaths, cls).__new__(cls, str) + self._instance = instance + self._field = field + self._all = None + self._length = None + self._current = 0 + self.settings = { + 'img_attrs': {}, + 'thumbnail_size': None, + 'thumbnail_attrs': {}, + } + self.settings.update(settings) + return self + + def all(self): + if self._all == None: + self._all = [] + for f in self.splitlines(): + self._all.append(self._field.attr_class.item_class(f, self._instance, self._field, self.settings)) + + self._length = len(self._all) + + return self._all + + def count(self): + self.all() + return self._length 
+ + def first(self): + return self.all() and self.all()[0] or None + + def last(self): + return self.all() and self.all()[-1] or None + + def next(self): + f = self.all()[self._current] + self._current += 1 + return f + + def next_n(self, n): + files = self.all()[self._current:self._current+n] + self._current += n + return files + + def next_all(self): + files = self.all()[self._current:] + self._current = self._length - 1 + return files + + def has_next(self): + self.all() + return max(0, self._length - self._current - 1) + + def reset(self): + self._current = 0 + + def __getattr__(self, attr): + next_n = re.match(r'^next_(\d+)$', attr) + if next_n: + n = int(next_n.group(1)) + return curry(self.next_n, n) + + raise AttributeError + + +class ImagePaths(FilePaths): + item_class = ImagePath + + def as_gallery(self): + raise NotImplementedError + + def as_carousel(self): + raise NotImplementedError + + +class FilesDescriptor(object): + """ + Used django.db.models.fields.files.FileDescriptor as an example. + This descriptor returns an unicode object, with special methods + for formatting like filename(), absolute(), relative() and img_tag(). 
+ """ + def __init__(self, field): + self.field = field + + def __get__(self, instance=None, owner=None): + if instance is None: + return self + + files = instance.__dict__[self.field.name] + if isinstance(files, six.string_types) and not isinstance(files, (FilePath, FilePaths)): + attr = self.field.attr_class(files, instance, self.field) + instance.__dict__[self.field.name] = attr + + return instance.__dict__[self.field.name] + + def __set__(self, instance, value): + instance.__dict__[self.field.name] = value diff --git a/files_widget/fields.py b/files_widget/fields.py new file mode 100644 index 0000000..d8cca0a --- /dev/null +++ b/files_widget/fields.py @@ -0,0 +1,104 @@ +import os + +from django.db import models +from django import forms +from django.core import exceptions, validators +from django.utils.translation import ugettext_lazy as _ +from django.db.models.signals import post_save +from django.dispatch import receiver + +from .forms import FilesFormField, BaseFilesWidget, FileWidget, FilesWidget, ImageWidget, ImagesWidget +from .files import manage_files_on_disk +from . import controllers +from .conf import * + + +def formfield_defaults(self, default_widget=None, widget=None, form_class=FilesFormField, required=True, **kwargs): + if not isinstance(widget, BaseFilesWidget): + widget = default_widget + + defaults = { + 'form_class': FilesFormField, + 'fields': (forms.CharField(required=required), forms.CharField(required=False), forms.CharField(required=False), ), + 'widget': widget, + } + defaults.update(kwargs) + + return defaults + +def save_all_data(self, instance, data): + # Save old data to know which images are deleted. + # We don't know yet if the form will really be saved. 
+ old_data = getattr(instance, self.name) + setattr(instance, OLD_VALUE_STR % self.name, old_data) + setattr(instance, DELETED_VALUE_STR % self.name, data.deleted_files) + setattr(instance, MOVED_VALUE_STR % self.name, data.moved_files) + + +class FileField(models.CharField): + description = _("File") + attr_class = controllers.FilePath + + def __init__(self, *args, **kwargs): + if 'max_length' not in kwargs: + kwargs['max_length'] = 200 + super(FileField, self).__init__(*args, **kwargs) + + def contribute_to_class(self, cls, name): + super(FileField, self).contribute_to_class(cls, name) + receiver(post_save, sender=cls)(manage_files_on_disk) + setattr(cls, self.name, controllers.FilesDescriptor(self)) + + def save_form_data(self, instance, data): + save_all_data(self, instance, data) + super(FileField, self).save_form_data(instance, data) + + def formfield(self, default_widget=FileWidget(), **kwargs): + defaults = formfield_defaults(self, default_widget, **kwargs) + return super(FileField, self).formfield(**defaults) + + +class FilesField(models.TextField): + description = _("Files") + attr_class = controllers.FilePaths + + def contribute_to_class(self, cls, name): + super(FilesField, self).contribute_to_class(cls, name) + receiver(post_save, sender=cls)(manage_files_on_disk) + setattr(cls, self.name, controllers.FilesDescriptor(self)) + + def save_form_data(self, instance, data): + save_all_data(self, instance, data) + super(FilesField, self).save_form_data(instance, data) + + def formfield(self, default_widget=FilesWidget(), **kwargs): + defaults = formfield_defaults(self, default_widget, **kwargs) + return super(FilesField, self).formfield(**defaults) + + +class ImageField(FileField): + description = _("Image") + attr_class = controllers.ImagePath + + def formfield(self, default_widget=ImageWidget(), **kwargs): + defaults = formfield_defaults(self, default_widget, **kwargs) + return super(ImageField, self).formfield(**defaults) + + +class 
ImagesField(FilesField): + description = _("Images") + attr_class = controllers.ImagePaths + + def formfield(self, default_widget=ImagesWidget(), **kwargs): + defaults = formfield_defaults(self, default_widget, **kwargs) + return super(ImagesField, self).formfield(**defaults) + + +try: + from south.modelsinspector import add_introspection_rules + add_introspection_rules([], ["^topnotchdev\.files_widget\.fields\.FileField"]) + add_introspection_rules([], ["^topnotchdev\.files_widget\.fields\.FilesField"]) + add_introspection_rules([], ["^topnotchdev\.files_widget\.fields\.ImageField"]) + add_introspection_rules([], ["^topnotchdev\.files_widget\.fields\.ImagesField"]) +except ImportError: + pass diff --git a/files_widget/files.py b/files_widget/files.py new file mode 100644 index 0000000..d3452ec --- /dev/null +++ b/files_widget/files.py @@ -0,0 +1,209 @@ +import os, os.path +from io import FileIO, BufferedWriter +import re +import time + +from django.conf import settings +from django.core.files.storage import default_storage +from django.utils.translation import ugettext_lazy as _ +from django.template.defaultfilters import slugify + +from .conf import * + + +def is_file_image(path): + + try: + from PIL import Image + im = Image.open(path) + im.verify() + return True + except Exception as e: + return False + +def filename_from_path(path): + return re.sub(r'^.+/', '', path) + +def model_slug(model): + return slugify(model._meta.verbose_name_plural) + +def construct_temp_path(user): + now = time.localtime()[0:5] + dir_name = TEMP_DIR_FORMAT % now + return os.path.join(TEMP_DIR, dir_name, str(user.pk)) + +def construct_permanent_path(instance): + model_dir = model_slug(type(instance)) + return os.path.join(FILES_DIR, model_dir, str(instance.pk)) + +def in_directory(path, directory): + # don't try to manipulate with ../../ + full_path = os.path.join(MEDIA_ROOT, path) + return path.startswith(directory) and full_path == os.path.realpath(full_path) + +def 
in_permanent_directory(path, instance): + full_path = os.path.join(MEDIA_ROOT, path) + return path.startswith(construct_permanent_path(instance)) and full_path == os.path.realpath(full_path) + +def make_temp_directory(filename, user, short=False): + if not short: + public_dir = construct_temp_path(user) + else: + public_dir = f'{TEMP_DIR}' + full_dir = os.path.join(settings.MEDIA_ROOT, public_dir) + + try: + if not os.path.exists(full_dir): + os.makedirs(full_dir) + except EnvironmentError: + # deepest dir already exists + pass + + full_path = os.path.join(full_dir, filename) + available_full_path = default_storage.get_available_name(full_path) + return available_full_path + +def make_permanent_directory(temp_path, instance): + public_dir = construct_permanent_path(instance) + filename = filename_from_path(temp_path) + full_dir = os.path.join(MEDIA_ROOT, public_dir) + + if not os.path.exists(full_dir): + os.makedirs(full_dir) + + full_path = os.path.join(full_dir, filename) + available_full_path = default_storage.get_available_name(full_path) + return available_full_path + +def save_upload(uploaded, filename, raw_data, user): + ''' + raw_data: if True, uploaded is an HttpRequest object with the file being + the raw post data + if False, uploaded has been submitted via the basic form + submission and is a regular Django UploadedFile in request.FILES + ''' + + path = make_temp_directory(filename, user, short=True) + public_path = path.replace(MEDIA_ROOT, '', 1) + + #try: + with BufferedWriter(FileIO(path, "wb")) as dest: + # if the "advanced" upload, read directly from the HTTP request + # with the Django 1.3 functionality + if raw_data: + foo = uploaded.read(1024) + while foo: + dest.write(foo) + foo = uploaded.read(1024) + # if not raw, it was a form upload so read in the normal Django chunks fashion + else: + for c in uploaded.chunks(): + dest.write(c) + # got through saving the upload, report success + return public_path + #except IOError: + # could not open the 
file most likely + # pass + return False + +def try_to_recover_path(temp_path, instance): + filename = filename_from_path(temp_path) + permanent_directory = construct_permanent_path(instance) + permanent_path = os.path.join(permanent_directory, filename) + full_path = os.path.join(MEDIA_ROOT, permanent_path) + if os.path.exists(full_path): + return permanent_path, True + else: + return temp_path, False + +def move_to_permanent_directory(temp_path, instance): + if temp_path.startswith('/') or temp_path.find('//') != -1 \ + or in_permanent_directory(temp_path, instance): + return temp_path, False + + full_path = make_permanent_directory(temp_path, instance) + public_path = full_path.replace(MEDIA_ROOT, '', 1) + full_temp_path = os.path.join(MEDIA_ROOT, temp_path) + try: + os.link(full_temp_path, full_path) + except EnvironmentError: + return try_to_recover_path(temp_path, instance) + + if in_directory(temp_path, TEMP_DIR): + try: + os.remove(full_temp_path) + except EnvironmentError: + return try_to_recover_path(temp_path, instance) + + return public_path, True + +def manage_files_on_disk(sender, instance, **kwargs): + # Receiver of Django post_save signal. + # At this point we know that the model instance has been saved into the db. 
+ from .fields import ImagesField, ImageField, FilesField, FileField + fields = [field for field in sender._meta.fields if type(field) in [ImagesField, ImageField, FilesField, FileField]] + + for field in fields: + old_value_attr = OLD_VALUE_STR % field.name + deleted_value_attr = DELETED_VALUE_STR % field.name + moved_value_attr = MOVED_VALUE_STR % field.name + if not hasattr(instance, old_value_attr): + continue + + old_images = (getattr(instance, old_value_attr) or '').splitlines() + current_images = (getattr(instance, field.name) or '').splitlines() + deleted_images = (getattr(instance, deleted_value_attr) or '').splitlines() + moved_images = (getattr(instance, moved_value_attr) or '').splitlines() + new_images = [] + changed = False + + # Delete removed images from disk if they are in our FILES_DIR. + # we implement redundant checks to be absolutely sure that + # files must be deleted. For example, if a JS error leads to + # incorrect file lists in the hidden inputs, we reconstruct the old value. 
+ # + # O = old_images, C = current_images, D = deleted_images + # + # what do we do with files that appear in: + # + # --- (OK) do nothing, we don't even know it's name :) + # --D (OK) if in temp dir or permanent dir of inst: delete from disk + # -C- (OK) if not in permanent dir of inst, create hard link if possible; + # if in temp dir, delete + # -CD (ERROR) show warning message after save + # O-- (ERROR) put back in current, show warning message after save + # O-D (OK) if in temp dir or permanent dir of inst: delete from disk + # OC- (OK) if not in permanent dir of inst, create hard link if possible; + # if in temp dir, delete + # OCD (ERROR) show warning message after save + + for img in current_images: + # OC-, -C-, OCD & -CD + new_path = img + if in_directory(img, TEMP_DIR) or in_directory(img, FILES_DIR): + new_path, path_changed = move_to_permanent_directory(img, instance) + if path_changed: + changed = True + new_images.append(new_path) + + for img in deleted_images: + if img not in current_images: + # --D & O-D + if in_permanent_directory(img, instance) or in_directory(img, TEMP_DIR): + try: + os.remove(os.path.join(MEDIA_ROOT, img)) + except EnvironmentError as e: + pass + + for img in old_images: + if img not in current_images and img not in deleted_images and img not in moved_images: + # O-- + changed = True + new_images.append(img) + + delattr(instance, old_value_attr) + delattr(instance, deleted_value_attr) + delattr(instance, moved_value_attr) + if changed: + setattr(instance, field.name, '\n'.join(new_images)) + instance.save() diff --git a/files_widget/forms/__init__.py b/files_widget/forms/__init__.py new file mode 100644 index 0000000..2f76c29 --- /dev/null +++ b/files_widget/forms/__init__.py @@ -0,0 +1,2 @@ +from .fields import * +from .widgets import * diff --git a/files_widget/forms/fields.py b/files_widget/forms/fields.py new file mode 100644 index 0000000..f7a4aba --- /dev/null +++ b/files_widget/forms/fields.py @@ -0,0 +1,62 @@ +from 
django import forms +from django.core import exceptions, validators +from django.utils.translation import ugettext_lazy as _ +import six + +from files_widget.conf import * + + +class UnicodeWithAttr(six.text_type): + deleted_files = None + moved_files = None + +class FilesFormField(forms.MultiValueField): + def __init__(self, max_length=None, **kwargs): + super(FilesFormField, self).__init__(**kwargs) + + def compress(self, data_list): + files = UnicodeWithAttr(data_list[0]) + files.deleted_files = data_list[1] + files.moved_files = data_list[2] + return files + + def clean(self, value): + """ + This is a copy of MultiValueField.clean() with a BUGFIX: + - if self.required and field_value in validators.EMPTY_VALUES: + + if field.required and field_value in validators.EMPTY_VALUES: + """ + from django.forms.utils import ErrorList + from django.core.exceptions import ValidationError + + clean_data = [] + errors = ErrorList() + if not value or isinstance(value, (list, tuple)): + if not value or not [v for v in value if v not in validators.EMPTY_VALUES]: + if self.required: + raise ValidationError(self.error_messages['required']) + else: + return self.compress(value) + else: + raise ValidationError(self.error_messages['invalid']) + for i, field in enumerate(self.fields): + try: + field_value = value[i] + except IndexError: + field_value = None + if field.required and field_value in validators.EMPTY_VALUES: + raise ValidationError(self.error_messages['required']) + try: + clean_data.append(field.clean(field_value)) + except ValidationError as e: + # Collect all validation errors in a single list, which we'll + # raise at the end of clean(), rather than raising a single + # exception for the first error we encounter. 
from django import forms
from django.conf import settings
# BUGFIX: ugettext_lazy was removed in Django 4.0 -- importing it crashed
# this module under the Django 4.2 used by the project.
from django.utils.translation import gettext_lazy as _

from files_widget.conf import *


def use_filebrowser():
    """Return True when USE_FILEBROWSER is set and either django-filebrowser
    or filebrowser_safe is importable."""
    if not USE_FILEBROWSER:
        return False
    # BUGFIX: the original bare ``except:`` clauses swallowed everything,
    # including KeyboardInterrupt and genuine import-time errors inside the
    # packages; only a missing package should be tolerated here.
    try:
        import filebrowser  # noqa: F401
        return True
    except ImportError:
        pass
    try:
        import filebrowser_safe  # noqa: F401
        return True
    except ImportError:
        return False


TO_HIDE_ATTRS = {'class': 'hidden'}


class BaseFilesWidget(forms.MultiWidget):
    """MultiWidget of three hidden inputs (current / deleted / moved file
    lists) rendered through a files_widget template."""

    def __init__(self,
                 multiple=False,
                 preview_size=150,
                 template="files_widget/files_widget.html",
                 widgets=(forms.HiddenInput,
                          forms.HiddenInput,
                          forms.HiddenInput),
                 **kwargs):
        super().__init__(widgets, **kwargs)
        self.multiple = multiple
        self.preview_size = preview_size
        self.template = template

    class Media:
        js = [
            JQUERY_PATH,
            JQUERY_UI_PATH,
            'files_widget/js/jquery.iframe-transport.js',
            'files_widget/js/jquery.fileupload.js',
            'files_widget/js/widgets.js',
        ]
        if use_filebrowser():
            js.append(FILEBROWSER_JS_PATH)

        css = {
            'all': (
                'files_widget/css/widgets.css',
            ),
        }

    @property
    def is_hidden(self):
        # The sub-widgets are hidden inputs, but the widget itself renders
        # a visible UI, so never report it as hidden.
        return False

    def decompress(self, value):
        # Split a stored value into [files, deleted_files, moved_files].
        if value:
            return [value, '', '', ]
        return ['', '', '', ]

    def render(self, name, value, attrs=None, renderer=None):
        if not isinstance(value, list):
            value = self.decompress(value)
        files, deleted_files, moved_files = value

        context = {
            'MEDIA_URL': settings.MEDIA_URL,
            'STATIC_URL': settings.STATIC_URL,
            'use_filebrowser': use_filebrowser(),
            'add_image_by_url': ADD_IMAGE_BY_URL,
            'input_string': super().render(name, value, attrs, renderer),
            'name': name,
            'files': files,
            'deleted_files': deleted_files,
            'multiple': 1 if self.multiple else 0,
            'preview_size': str(self.preview_size),
        }
        # NOTE(review): assumes ``renderer`` is always supplied (Django has
        # passed one since 2.1); a None renderer would raise here -- confirm.
        return renderer.render(self.template, context)


class FileWidget(BaseFilesWidget):
    """Single-file upload widget."""

    def __init__(self, multiple=False, preview_size=128, **kwargs):
        super().__init__(multiple, preview_size, template="files_widget/files_widget.html", **kwargs)


class FilesWidget(BaseFilesWidget):
    """Multi-file upload widget."""

    def __init__(self, multiple=True, preview_size=64, **kwargs):
        super().__init__(multiple, preview_size, template="files_widget/files_widget.html", **kwargs)


class ImageWidget(BaseFilesWidget):
    """Single-image upload widget with preview."""

    def __init__(self, multiple=False, preview_size=250, **kwargs):
        super().__init__(multiple, preview_size, template="files_widget/images_widget.html", **kwargs)


class ImagesWidget(BaseFilesWidget):
    """Multi-image upload widget with previews."""

    def __init__(self, multiple=True, preview_size=150, **kwargs):
        super().__init__(multiple, preview_size, template="files_widget/images_widget.html", **kwargs)
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2018-10-10 08:53+0800\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + +#: files_widget/fields.py:39 +msgid "File" +msgstr "" + +#: files_widget/fields.py:60 +msgid "Files" +msgstr "" + +#: files_widget/fields.py:78 +msgid "Image" +msgstr "" + +#: files_widget/fields.py:87 +msgid "Images" +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:3 +msgid "undo" +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:6 +msgid "Drop multiple images here to upload" +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:8 +msgid "Drop an image here to upload" +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:32 +msgid "Upload" +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:37 +msgid "Library" +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:41 +#: files_widget/templates/files_widget/files_widget.html:43 +msgid "Add by url..." +msgstr "" + +#: files_widget/templates/files_widget/files_widget.html:60 +msgid "Images to be removed" +msgstr "" diff --git a/files_widget/locale/zh_Hans/LC_MESSAGES/django.po b/files_widget/locale/zh_Hans/LC_MESSAGES/django.po new file mode 100644 index 0000000..5a25dbf --- /dev/null +++ b/files_widget/locale/zh_Hans/LC_MESSAGES/django.po @@ -0,0 +1,65 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# FIRST AUTHOR , YEAR. 
+# +#, fuzzy +msgid "" +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2018-10-10 08:51+0800\n" +"PO-Revision-Date: 2018-10-10 08:58+0806\n" +"Last-Translator: b' '\n" +"Language-Team: LANGUAGE \n" +"Language: \n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" +"X-Translated-Using: django-rosetta 0.9.0\n" + +#: files_widget/fields.py:39 +msgid "File" +msgstr "文件" + +#: files_widget/fields.py:60 +msgid "Files" +msgstr "文件" + +#: files_widget/fields.py:78 +msgid "Image" +msgstr "图片" + +#: files_widget/fields.py:87 +msgid "Images" +msgstr "图片" + +#: files_widget/templates/files_widget/files_widget.html:3 +msgid "undo" +msgstr "撤销" + +#: files_widget/templates/files_widget/files_widget.html:6 +msgid "Drop multiple images here to upload" +msgstr "拖动多个图片文件来上传" + +#: files_widget/templates/files_widget/files_widget.html:8 +msgid "Drop an image here to upload" +msgstr "拖动一个图片文件来上传" + +#: files_widget/templates/files_widget/files_widget.html:32 +msgid "Upload" +msgstr "上传" + +#: files_widget/templates/files_widget/files_widget.html:37 +msgid "Library" +msgstr "库" + +#: files_widget/templates/files_widget/files_widget.html:41 +#: files_widget/templates/files_widget/files_widget.html:43 +msgid "Add by url..." 
+msgstr "添加URL" + +#: files_widget/templates/files_widget/files_widget.html:60 +msgid "Images to be removed" +msgstr "待删除的图片" diff --git a/files_widget/migrations/0001_initial.py b/files_widget/migrations/0001_initial.py new file mode 100644 index 0000000..c32cfdd --- /dev/null +++ b/files_widget/migrations/0001_initial.py @@ -0,0 +1,54 @@ +# Generated by Django 2.1.2 on 2018-10-09 23:37 + +from django.db import migrations, models +import django.db.models.deletion +import files_widget.fields + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0009_alter_user_last_name_max_length'), + ] + + operations = [ + migrations.CreateModel( + name='FileIcon', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('extension', models.CharField(blank=True, max_length=100, null=True)), + ('image', files_widget.fields.ImageField(max_length=200)), + ('display_text_overlay', models.BooleanField(default=True)), + ('overlay_text', models.CharField(blank=True, help_text='Leave blank to display file extension', max_length=7, null=True)), + ('base_color', models.CharField(blank=True, max_length=12, null=True)), + ], + ), + migrations.CreateModel( + name='IconSet', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=50, unique=True)), + ('css_path', models.CharField(blank=True, help_text='Optional css file for icon styling', max_length=200, null=True)), + ('active', models.BooleanField(default=True)), + ('priority', models.IntegerField(default=1)), + ('default_icon', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='files_widget.FileIcon')), + ], + ), + migrations.CreateModel( + name='GlobalPermission', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + }, + bases=('auth.permission',), + ), + migrations.AddField( + 
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import Group, Permission

from .fields import ImageField


class GlobalPermissionManager(models.Manager):
    """Manager that restricts the queryset to global (model-less) permissions."""

    def get_queryset(self):
        # BUGFIX: Manager.get_query_set() was renamed get_queryset() in
        # Django 1.6 and removed in 1.8 -- on the Django 2.x this app targets
        # (see its own generated migrations) the old spelling was never called,
        # so the filter silently did not apply.  ContentType also lost its
        # ``name`` field in Django 1.8; the marker content type is identified
        # by its ``model`` value instead.
        return super(GlobalPermissionManager, self).get_queryset().filter(
            content_type__model='global_permission')

    # Backwards-compatible alias for any caller still using the historic name.
    get_query_set = get_queryset


class GlobalPermission(Permission):
    """A global permission, not attached to a model."""

    objects = GlobalPermissionManager()

    class Meta:
        proxy = True

    def save(self, *args, **kwargs):
        # BUGFIX: ContentType has no ``name`` field since Django 1.8, so
        # get_or_create(name=...) raised TypeError.  Key the marker content
        # type on ``model`` (kept in sync with the manager filter above).
        ct, created = ContentType.objects.get_or_create(
            model='global_permission', app_label=self._meta.app_label,
        )
        self.content_type = ct
        super(GlobalPermission, self).save(*args, **kwargs)


# Best-effort bootstrap of the upload permission at import time.
# NOTE(review): module-level database access is fragile (it runs before
# migrations on a fresh database and during ``manage.py`` commands); a
# post_migrate signal handler would be the cleaner home for this -- kept
# here to preserve the module's existing side-effect contract.
try:
    permission = GlobalPermission.objects.get_or_create(
        codename='can_upload_files',
        name='Can Upload Files',
    )
except Exception:
    # "Table '...auth_permission' doesn't exist" on a fresh database;
    # it will exist the next time this module is loaded.  ``Exception``
    # (not a bare except) so SystemExit/KeyboardInterrupt still propagate.
    pass


class IconSet(models.Model):
    # A named, prioritised collection of per-extension file icons.
    name = models.CharField(max_length=50, unique=True)
    css_path = models.CharField(max_length=200, blank=True, null=True, help_text='Optional css file for icon styling')
    active = models.BooleanField(default=True)
    priority = models.IntegerField(default=1)
    # Icon used when no FileIcon matches the file's extension.
    default_icon = models.ForeignKey('files_widget.FileIcon', null=True, blank=True, on_delete=models.SET_NULL)


class FileIcon(models.Model):
    # One icon image mapped to a file extension within an IconSet.
    icon_set = models.ForeignKey('files_widget.IconSet', on_delete=models.CASCADE)
    extension = models.CharField(max_length=100, blank=True, null=True)
    image = ImageField()
    display_text_overlay = models.BooleanField(default=True)
    overlay_text = models.CharField(max_length=7, blank=True, null=True, help_text='Leave blank to display file extension')
    base_color = models.CharField(max_length=12, blank=True, null=True)
/dev/null and b/files_widget/static/docs/img/admin-images-widget-progress.png differ diff --git a/files_widget/static/files_widget/css/widgets.css b/files_widget/static/files_widget/css/widgets.css new file mode 100644 index 0000000..6666baa --- /dev/null +++ b/files_widget/static/files_widget/css/widgets.css @@ -0,0 +1,294 @@ +.files-widget { + display: inline-block; + width: 100%; +} + +.files-widget-dropbox { + width: 100%; + min-height: 100px; + border: 1px solid transparent; + margin: 0 -3px; + display: inline-block; +} + +.files-widget-dropbox.dragging-files { + background: rgba(0, 0, 0, .05); + border: 1px dashed rgba(0, 0, 0, .2); +} + +.files-widget-dropbox.dragging-files.dragover { + background: rgba(0, 0, 0, .1); + border: 1px dashed rgba(0, 0, 0, .4); +} + +.files-widget-dropbox .message { + display: block; + color: #a0a2a4; + margin: 30px; + text-align: center; +} + +/*для файлов*/ +.files-widget-dropbox .preview.filetype, .files-widget-dropbox .sortable-placeholder { + padding: 5px; + display: flex; + vertical-align: middle; + position: relative; + text-align: center; + width: 400px; + float: left; + margin-right: 10px; +} + + +/*для картинок*/ +.files-widget-dropbox .preview, .files-widget-dropbox .sortable-placeholder { + padding: 5px; + display: inline-block; + vertical-align: middle; + position: relative; + text-align: center; +} + +.files-widget-dropbox .sortable-placeholder { + background: rgba(0, 0, 0, .1); +} + +.files-widget .image-holder { + display: inline-block; + position: relative; + border: 3px solid white; + box-shadow: 0 1px 4px rgba(0, 0, 0, .8); + border-radius: 3px; + background: white; + text-align: center; +} + +.files-widget-dropbox .image-holder { + min-height: 50px; + /*min-width: 50px;*/ + line-height: 50px; +} + +.files-widget-dropbox .image-holder.icon100 { + min-height: 100px; + /*min-width: 100px;*/ + line-height: 100px; +} + +.files-widget-dropbox .image-holder.icon30 { + min-height: 30px; + /*min-width: 30px;*/ + 
line-height: 30px; + margin-right: 10px; +} + +.files-widget-dropbox .preview .thumbnail { + vertical-align: middle; + background: url(/static/files_widget/img/file-icons/file_icon.png) left top; + height: 50px; + /*width: 50px;*/ +} + +.files-widget-dropbox .preview .thumbnail.icon100 { + vertical-align: middle; + background: url(/static/files_widget/img/file-icons/file_icon.png) left top; + height: 100px; + /*width: 100px;*/ +} + +.files-widget-dropbox .preview .thumbnail.icon30 { + vertical-align: middle; + background: url(/static/files_widget/img/file-icons/file_icon.png) left top; + height: 30px; + /*width: 100px;*/ +} + + +.file-name-for-icon100 { + overflow: hidden; + width: 100px; + text-overflow: ellipsis; + white-space: nowrap; + height: 10px; + margin-top: 5px; + font-size: 10px; + +} + + +.file-name-for-icon30 { + overflow: hidden; + width: 100%; + /*text-overflow: ellipsis;*/ + /*white-space: nowrap;*/ + height: 100%; + margin-top: 5px; + font-size: 10px; + text-align: left; +} + + +.files-widget-dropbox .preview.new .thumbnail { + opacity: .5; +} + +.files-widget-dropbox .buttons { + position: absolute; + top: -10px; + right: -10px; + opacity: 0; + white-space: nowrap; + display: block; +} + +.files-widget-dropbox .buttons img { + vertical-align: top; +} + +.files-widget-dropbox .preview:hover .buttons { + opacity: 1; +} + +.files-widget-dropbox .preview.ui-sortable-helper .buttons, + .files-widget-dropbox .preview.new .buttons { + display: none; +} + +.files-widget-dropbox .preview:hover .buttons a { + margin: -2px; +} + +.files-widget-dropbox .uploaded { + position: absolute; + top:0; + left:0; + height:100%; + width:100%; + background: url('../img/done.png') no-repeat center center rgba(255,255,255,0.5); + display: none; +} + +.files-widget-dropbox .preview.done .uploaded { + display: block; +} + +.files-widget-dropbox .filename { + display: block; + position: absolute; + left: 0; + right: 0; + top: 20px; + overflow: hidden; +} + 
+.files-widget-dropbox .progress-holder { + position: absolute; + background-color: #252f38; + height: 10px; + right: 8px; + left: 8px; + bottom: 8px; + box-shadow: 0 0 2px #000; +} + +.files-widget-dropbox .progress { + background-color: #2586d0; + position: absolute; + height: 100%; + left: 0; + width: 0; + box-shadow: 0 0 1px rgba(255, 255, 255, 0.4) inset; + -moz-transition: 0.25s; + -webkit-transition: 0.25s; + -o-transition: 0.25s; + transition: 0.25s; +} + +.files-widget-dropbox .preview.done .progress { + width:100% !important; +} + +.files-widget .controls { + padding: 8px 0 0; + text-align: left; +} + +.files-widget .controls .fake-files-input { + margin-left: 2px; + position: relative; + display: inline-block; + overflow: hidden; +} + +.files-widget .controls .files-input { + padding: 4px; + opacity: .0001; + position: absolute; + top: 0; + bottom: 0; + right: 0; + cursor: pointer; +} + +.files-widget .controls input[type=text].add-by-url { + width: 300px; +} + +.files-widget .controls .upload-progress-stats { + float: right; + display: block; + padding-top: 5px; + color: #888; +} + +.files-widget-deleted { + +} + +.files-widget-deleted p { + color: #bf3030; + border-bottom: 1px solid #e0e0e0; + padding: 0 1px !important; + margin-top: 10px; + margin-left: 0 !important; +} + +.files-widget-deleted .deleted-list { + border-top: 1px solid white; + padding-top: 5px; +} + +.files-widget-deleted .deleted-file { + height: 38px; + line-height: 38px; + margin-bottom: 5px; +} + +.files-widget-deleted .deleted-file > span { + display: inline-block; + vertical-align: middle; +} + +.files-widget-deleted .deleted-file .image-holder { + min-height: 32px; + min-width: 32px; + line-height: 32px; + margin: 0 5px 0 3px; +} + +.files-widget-deleted .deleted-file .icon { + max-height: 32px; + max-width: 32px; + vertical-align: middle; +} + +.files-widget-deleted .deleted-file .name { + white-space: nowrap; + text-decoration: line-through; + margin: 0 10px; +} + 
+.files-widget-deleted .deleted-file .undo { + font-weight: bold; +} diff --git a/files_widget/static/files_widget/img/blue_line.jpg b/files_widget/static/files_widget/img/blue_line.jpg new file mode 100644 index 0000000..2af2c9f Binary files /dev/null and b/files_widget/static/files_widget/img/blue_line.jpg differ diff --git a/files_widget/static/files_widget/img/button_template.png b/files_widget/static/files_widget/img/button_template.png new file mode 100644 index 0000000..8f9882e Binary files /dev/null and b/files_widget/static/files_widget/img/button_template.png differ diff --git a/files_widget/static/files_widget/img/close_button.png b/files_widget/static/files_widget/img/close_button.png new file mode 100644 index 0000000..a7c7005 Binary files /dev/null and b/files_widget/static/files_widget/img/close_button.png differ diff --git a/files_widget/static/files_widget/img/done.png b/files_widget/static/files_widget/img/done.png new file mode 100644 index 0000000..22a1410 Binary files /dev/null and b/files_widget/static/files_widget/img/done.png differ diff --git a/files_widget/static/files_widget/img/enlarge_button.png b/files_widget/static/files_widget/img/enlarge_button.png new file mode 100644 index 0000000..a2f0769 Binary files /dev/null and b/files_widget/static/files_widget/img/enlarge_button.png differ diff --git a/files_widget/static/files_widget/img/file-icons/archive.png b/files_widget/static/files_widget/img/file-icons/archive.png new file mode 100644 index 0000000..df21608 Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/archive.png differ diff --git a/files_widget/static/files_widget/img/file-icons/audio.png b/files_widget/static/files_widget/img/file-icons/audio.png new file mode 100644 index 0000000..0f85565 Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/audio.png differ diff --git a/files_widget/static/files_widget/img/file-icons/blank.png 
b/files_widget/static/files_widget/img/file-icons/blank.png new file mode 100644 index 0000000..2de0ea3 Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/blank.png differ diff --git a/files_widget/static/files_widget/img/file-icons/code.png b/files_widget/static/files_widget/img/file-icons/code.png new file mode 100644 index 0000000..07d389e Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/code.png differ diff --git a/files_widget/static/files_widget/img/file-icons/executable.png b/files_widget/static/files_widget/img/file-icons/executable.png new file mode 100644 index 0000000..f6c174e Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/executable.png differ diff --git a/files_widget/static/files_widget/img/file-icons/file_icon.png b/files_widget/static/files_widget/img/file-icons/file_icon.png new file mode 100644 index 0000000..071cf8f Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/file_icon.png differ diff --git a/files_widget/static/files_widget/img/file-icons/image.png b/files_widget/static/files_widget/img/file-icons/image.png new file mode 100644 index 0000000..9239a2d Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/image.png differ diff --git a/files_widget/static/files_widget/img/file-icons/settings.png b/files_widget/static/files_widget/img/file-icons/settings.png new file mode 100644 index 0000000..42ce76e Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/settings.png differ diff --git a/files_widget/static/files_widget/img/file-icons/text.png b/files_widget/static/files_widget/img/file-icons/text.png new file mode 100644 index 0000000..3938565 Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/text.png differ diff --git a/files_widget/static/files_widget/img/file-icons/typography.png b/files_widget/static/files_widget/img/file-icons/typography.png new file mode 
100644 index 0000000..56a7704 Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/typography.png differ diff --git a/files_widget/static/files_widget/img/file-icons/video.png b/files_widget/static/files_widget/img/file-icons/video.png new file mode 100644 index 0000000..058f0b7 Binary files /dev/null and b/files_widget/static/files_widget/img/file-icons/video.png differ diff --git a/files_widget/static/files_widget/img/transparent-bg.gif b/files_widget/static/files_widget/img/transparent-bg.gif new file mode 100644 index 0000000..c7e98e0 Binary files /dev/null and b/files_widget/static/files_widget/img/transparent-bg.gif differ diff --git a/files_widget/static/files_widget/js/jquery.fileupload-angular.js b/files_widget/static/files_widget/js/jquery.fileupload-angular.js new file mode 100644 index 0000000..e7ba784 --- /dev/null +++ b/files_widget/static/files_widget/js/jquery.fileupload-angular.js @@ -0,0 +1,348 @@ +/* + * jQuery File Upload AngularJS Plugin 1.0.1 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2013, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/MIT + */ + +/*jslint nomen: true, unparam: true */ +/*global angular */ + +(function () { + 'use strict'; + + angular.module('blueimp.fileupload', []) + + .provider('fileUpload', function () { + var scopeApply = function () { + var scope = angular.element(this) + .fileupload('option', 'scope')(); + if (!scope.$$phase) { + scope.$apply(); + } + }, + $config; + $config = this.defaults = { + handleResponse: function (e, data) { + var files = data.result && data.result.files; + if (files) { + data.scope().replace(data.files, files); + } else if (data.errorThrown || + data.textStatus === 'error') { + data.files[0].error = data.errorThrown || + data.textStatus; + } + }, + add: function (e, data) { + var scope = data.scope(); + data.process(function () { + return scope.process(data); + }).always( + 
function () { + var file = data.files[0], + submit = function () { + return data.submit(); + }; + file.$cancel = function () { + scope.clear(data.files); + return data.abort(); + }; + file.$state = function () { + return data.state(); + }; + file.$progress = function () { + return data.progress(); + }; + file.$response = function () { + return data.response(); + }; + if (file.$state() === 'rejected') { + file._$submit = submit; + } else { + file.$submit = submit; + } + scope.$apply(function () { + var method = scope.option('prependFiles') ? + 'unshift' : 'push'; + Array.prototype[method].apply( + scope.queue, + data.files + ); + if (file.$submit && + (scope.option('autoUpload') || + data.autoUpload) && + data.autoUpload !== false) { + file.$submit(); + } + }); + } + ); + }, + progress: function (e, data) { + data.scope().$apply(); + }, + done: function (e, data) { + var that = this; + data.scope().$apply(function () { + data.handleResponse.call(that, e, data); + }); + }, + fail: function (e, data) { + var that = this; + if (data.errorThrown === 'abort') { + return; + } + if (data.dataType.indexOf('json') === data.dataType.length - 4) { + try { + data.result = angular.fromJson(data.jqXHR.responseText); + } catch (err) {} + } + data.scope().$apply(function () { + data.handleResponse.call(that, e, data); + }); + }, + stop: scopeApply, + processstart: scopeApply, + processstop: scopeApply, + getNumberOfFiles: function () { + return this.scope().queue.length; + }, + dataType: 'json', + prependFiles: true, + autoUpload: false + }; + this.$get = [ + function () { + return { + defaults: $config + }; + } + ]; + }) + + .provider('formatFileSizeFilter', function () { + var $config = this.defaults = { + // Byte units following the IEC format + // http://en.wikipedia.org/wiki/Kilobyte + units: [ + {size: 1000000000, suffix: ' GB'}, + {size: 1000000, suffix: ' MB'}, + {size: 1000, suffix: ' KB'} + ] + }; + this.$get = function () { + return function (bytes) { + if 
(!angular.isNumber(bytes)) { + return ''; + } + var unit = true, + i = -1; + while (unit) { + unit = $config.units[i += 1]; + if (i === $config.units.length - 1 || bytes >= unit.size) { + return (bytes / unit.size).toFixed(2) + unit.suffix; + } + } + }; + }; + }) + + .controller('FileUploadController', [ + '$scope', '$element', '$attrs', 'fileUpload', + function ($scope, $element, $attrs, fileUpload) { + $scope.disabled = angular.element('') + .prop('disabled'); + $scope.queue = $scope.queue || []; + $scope.clear = function (files) { + var queue = this.queue, + i = queue.length, + file = files, + length = 1; + if (angular.isArray(files)) { + file = files[0]; + length = files.length; + } + while (i) { + if (queue[i -= 1] === file) { + return queue.splice(i, length); + } + } + }; + $scope.replace = function (oldFiles, newFiles) { + var queue = this.queue, + file = oldFiles[0], + i, + j; + for (i = 0; i < queue.length; i += 1) { + if (queue[i] === file) { + for (j = 0; j < newFiles.length; j += 1) { + queue[i + j] = newFiles[j]; + } + return; + } + } + }; + $scope.progress = function () { + return $element.fileupload('progress'); + }; + $scope.active = function () { + return $element.fileupload('active'); + }; + $scope.option = function (option, data) { + return $element.fileupload('option', option, data); + }; + $scope.add = function (data) { + return $element.fileupload('add', data); + }; + $scope.send = function (data) { + return $element.fileupload('send', data); + }; + $scope.process = function (data) { + return $element.fileupload('process', data); + }; + $scope.processing = function (data) { + return $element.fileupload('processing', data); + }; + $scope.applyOnQueue = function (method) { + var list = this.queue.slice(0), + i, + file; + for (i = 0; i < list.length; i += 1) { + file = list[i]; + if (file[method]) { + file[method](); + } + } + }; + $scope.submit = function () { + this.applyOnQueue('$submit'); + }; + $scope.cancel = function () { + 
this.applyOnQueue('$cancel'); + }; + // The fileupload widget will initialize with + // the options provided via "data-"-parameters, + // as well as those given via options object: + $element.fileupload(angular.extend( + {scope: function () { + return $scope; + }}, + fileUpload.defaults + )).on('fileuploadadd', function (e, data) { + data.scope = $scope.option('scope'); + }).on([ + 'fileuploadadd', + 'fileuploadsubmit', + 'fileuploadsend', + 'fileuploaddone', + 'fileuploadfail', + 'fileuploadalways', + 'fileuploadprogress', + 'fileuploadprogressall', + 'fileuploadstart', + 'fileuploadstop', + 'fileuploadchange', + 'fileuploadpaste', + 'fileuploaddrop', + 'fileuploaddragover', + 'fileuploadchunksend', + 'fileuploadchunkdone', + 'fileuploadchunkfail', + 'fileuploadchunkalways', + 'fileuploadprocessstart', + 'fileuploadprocess', + 'fileuploadprocessdone', + 'fileuploadprocessfail', + 'fileuploadprocessalways', + 'fileuploadprocessstop' + ].join(' '), function (e, data) { + $scope.$emit(e.type, data); + }); + // Observe option changes: + $scope.$watch( + $attrs.fileupload, + function (newOptions, oldOptions) { + if (newOptions) { + $element.fileupload('option', newOptions); + } + } + ); + } + ]) + + .controller('FileUploadProgressController', [ + '$scope', '$attrs', '$parse', + function ($scope, $attrs, $parse) { + var fn = $parse($attrs.progress), + update = function () { + var progress = fn($scope); + if (!progress || !progress.total) { + return; + } + $scope.num = Math.floor( + progress.loaded / progress.total * 100 + ); + }; + update(); + $scope.$watch( + $attrs.progress + '.loaded', + function (newValue, oldValue) { + if (newValue !== oldValue) { + update(); + } + } + ); + } + ]) + + .controller('FileUploadPreviewController', [ + '$scope', '$element', '$attrs', '$parse', + function ($scope, $element, $attrs, $parse) { + var fn = $parse($attrs.preview), + file = fn($scope); + if (file.preview) { + $element.append(file.preview); + } + } + ]) + + 
.directive('fileupload', function () { + return { + controller: 'FileUploadController' + }; + }) + + .directive('progress', function () { + return { + controller: 'FileUploadProgressController' + }; + }) + + .directive('preview', function () { + return { + controller: 'FileUploadPreviewController' + }; + }) + + .directive('download', function () { + return function (scope, elm, attrs) { + elm.on('dragstart', function (e) { + try { + e.originalEvent.dataTransfer.setData( + 'DownloadURL', + [ + 'application/octet-stream', + elm.prop('download'), + elm.prop('href') + ].join(':') + ); + } catch (err) {} + }); + }; + }); + +}()); diff --git a/files_widget/static/files_widget/js/jquery.fileupload-process.js b/files_widget/static/files_widget/js/jquery.fileupload-process.js new file mode 100644 index 0000000..2f9eeed --- /dev/null +++ b/files_widget/static/files_widget/js/jquery.fileupload-process.js @@ -0,0 +1,158 @@ +/* + * jQuery File Upload Processing Plugin 1.1 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2012, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/MIT + */ + +/*jslint nomen: true, unparam: true */ +/*global define, window */ + +(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define([ + 'jquery', + './jquery.fileupload' + ], factory); + } else { + // Browser globals: + factory( + window.jQuery + ); + } +}(function ($) { + 'use strict'; + + var originalAdd = $.blueimp.fileupload.prototype.options.add; + + // The File Upload Processing plugin extends the fileupload widget + // with file processing functionality: + $.widget('blueimp.fileupload', $.blueimp.fileupload, { + + options: { + // The list of processing actions: + processQueue: [ + /* + { + action: 'log', + type: 'debug' + } + */ + ], + add: function (e, data) { + var $this = $(this); + data.process(function () { + return 
$this.fileupload('process', data); + }); + originalAdd.call(this, e, data); + } + }, + + processActions: { + /* + log: function (data, options) { + console[options.type]( + 'Processing "' + data.files[data.index].name + '"' + ); + } + */ + }, + + _processFile: function (data) { + var that = this, + dfd = $.Deferred().resolveWith(that, [data]), + chain = dfd.promise(); + this._trigger('process', null, data); + $.each(data.processQueue, function (i, settings) { + var func = function (data) { + return that.processActions[settings.action].call( + that, + data, + settings + ); + }; + chain = chain.pipe(func, settings.always && func); + }); + chain + .done(function () { + that._trigger('processdone', null, data); + that._trigger('processalways', null, data); + }) + .fail(function () { + that._trigger('processfail', null, data); + that._trigger('processalways', null, data); + }); + return chain; + }, + + // Replaces the settings of each processQueue item that + // are strings starting with an "@", using the remaining + // substring as key for the option map, + // e.g. 
"@autoUpload" is replaced with options.autoUpload: + _transformProcessQueue: function (options) { + var processQueue = []; + $.each(options.processQueue, function () { + var settings = {}; + $.each(this, function (key, value) { + if ($.type(value) === 'string' && + value.charAt(0) === '@') { + settings[key] = options[value.slice(1)]; + } else { + settings[key] = value; + } + }); + processQueue.push(settings); + }); + options.processQueue = processQueue; + }, + + // Returns the number of files currently in the processsing queue: + processing: function () { + return this._processing; + }, + + // Processes the files given as files property of the data parameter, + // returns a Promise object that allows to bind callbacks: + process: function (data) { + var that = this, + options = $.extend({}, this.options, data); + if (options.processQueue && options.processQueue.length) { + this._transformProcessQueue(options); + if (this._processing === 0) { + this._trigger('processstart'); + } + $.each(data.files, function (index, file) { + var opts = index ? 
$.extend({}, options) : options, + func = function () { + return that._processFile(opts); + }; + opts.index = index; + that._processing += 1; + that._processingQueue = that._processingQueue.pipe(func, func) + .always(function () { + that._processing -= 1; + if (that._processing === 0) { + that._trigger('processstop'); + } + }); + }); + } + return this._processingQueue; + }, + + _create: function () { + this._super(); + this._processing = 0; + this._processingQueue = $.Deferred().resolveWith(this) + .promise(); + } + + }); + +})); diff --git a/files_widget/static/files_widget/js/jquery.fileupload-resize.js b/files_widget/static/files_widget/js/jquery.fileupload-resize.js new file mode 100644 index 0000000..ae5c5be --- /dev/null +++ b/files_widget/static/files_widget/js/jquery.fileupload-resize.js @@ -0,0 +1,212 @@ +/* + * jQuery File Upload Image Resize Plugin 1.1.2 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2013, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/MIT + */ + +/*jslint nomen: true, unparam: true, regexp: true */ +/*global define, window */ + +(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define([ + 'jquery', + 'load-image', + 'canvas-to-blob', + './jquery.fileupload-process' + ], factory); + } else { + // Browser globals: + factory( + window.jQuery, + window.loadImage + ); + } +}(function ($, loadImage) { + 'use strict'; + + // Prepend to the default processQueue: + $.blueimp.fileupload.prototype.options.processQueue.unshift( + { + action: 'loadImage', + fileTypes: '@loadImageFileTypes', + maxFileSize: '@loadImageMaxFileSize', + noRevoke: '@loadImageNoRevoke', + disabled: '@disableImageLoad' + }, + { + action: 'resizeImage', + maxWidth: '@imageMaxWidth', + maxHeight: '@imageMaxHeight', + minWidth: '@imageMinWidth', + minHeight: '@imageMinHeight', + crop: '@imageCrop', + 
disabled: '@disableImageResize' + }, + { + action: 'saveImage', + disabled: '@disableImageResize' + }, + { + action: 'resizeImage', + maxWidth: '@previewMaxWidth', + maxHeight: '@previewMaxHeight', + minWidth: '@previewMinWidth', + minHeight: '@previewMinHeight', + crop: '@previewCrop', + canvas: '@previewAsCanvas', + disabled: '@disableImagePreview' + }, + { + action: 'setImage', + // The name of the property the resized image + // is saved as on the associated file object: + name: 'preview', + disabled: '@disableImagePreview' + } + ); + + // The File Upload Resize plugin extends the fileupload widget + // with image resize functionality: + $.widget('blueimp.fileupload', $.blueimp.fileupload, { + + options: { + // The regular expression for the types of images to load: + // matched against the file type: + loadImageFileTypes: /^image\/(gif|jpeg|png)$/, + // The maximum file size of images to load: + loadImageMaxFileSize: 5000000, // 5MB + // The maximum width of resized images: + imageMaxWidth: 1920, + // The maximum height of resized images: + imageMaxHeight: 1080, + // Define if resized images should be cropped or only scaled: + imageCrop: false, + // Disable the resize image functionality by default: + disableImageResize: true, + // The maximum width of the preview images: + previewMaxWidth: 80, + // The maximum height of the preview images: + previewMaxHeight: 80, + // Define if preview images should be cropped or only scaled: + previewCrop: false, + // Define if preview images should be resized as canvas elements: + previewAsCanvas: true + }, + + processActions: { + + // Loads the image given via data.files and data.index + // as img element if the browser supports canvas. 
+ // Accepts the options fileTypes (regular expression) + // and maxFileSize (integer) to limit the files to load: + loadImage: function (data, options) { + if (options.disabled) { + return data; + } + var that = this, + file = data.files[data.index], + dfd = $.Deferred(); + if (($.type(options.maxFileSize) === 'number' && + file.size > options.maxFileSize) || + (options.fileTypes && + !options.fileTypes.test(file.type)) || + !loadImage( + file, + function (img) { + if (!img.src) { + return dfd.rejectWith(that, [data]); + } + data.img = img; + dfd.resolveWith(that, [data]); + }, + options + )) { + dfd.rejectWith(that, [data]); + } + return dfd.promise(); + }, + + // Resizes the image given as data.canvas or data.img + // and updates data.canvas or data.img with the resized image. + // Accepts the options maxWidth, maxHeight, minWidth, + // minHeight, canvas and crop: + resizeImage: function (data, options) { + options = $.extend({canvas: true}, options); + var img = (options.canvas && data.canvas) || data.img, + canvas; + if (img && !options.disabled) { + canvas = loadImage.scale(img, options); + if (canvas && (canvas.width !== img.width || + canvas.height !== img.height)) { + data[canvas.getContext ? 'canvas' : 'img'] = canvas; + } + } + return data; + }, + + // Saves the processed image given as data.canvas + // inplace at data.index of data.files: + saveImage: function (data, options) { + if (!data.canvas || options.disabled) { + return data; + } + var that = this, + file = data.files[data.index], + name = file.name, + dfd = $.Deferred(), + callback = function (blob) { + if (!blob.name) { + if (file.type === blob.type) { + blob.name = file.name; + } else if (file.name) { + blob.name = file.name.replace( + /\..+$/, + '.' 
+ blob.type.substr(6) + ); + } + } + // Store the created blob at the position + // of the original file in the files list: + data.files[data.index] = blob; + dfd.resolveWith(that, [data]); + }; + // Use canvas.mozGetAsFile directly, to retain the filename, as + // Gecko doesn't support the filename option for FormData.append: + if (data.canvas.mozGetAsFile) { + callback(data.canvas.mozGetAsFile( + (/^image\/(jpeg|png)$/.test(file.type) && name) || + ((name && name.replace(/\..+$/, '')) || + 'blob') + '.png', + file.type + )); + } else if (data.canvas.toBlob) { + data.canvas.toBlob(callback, file.type); + } else { + return data; + } + return dfd.promise(); + }, + + // Sets the resized version of the image as a property of the + // file object, must be called after "saveImage": + setImage: function (data, options) { + var img = data.canvas || data.img; + if (img && !options.disabled) { + data.files[data.index][options.name] = img; + } + return data; + } + + } + + }); + +})); diff --git a/files_widget/static/files_widget/js/jquery.fileupload-ui.js b/files_widget/static/files_widget/js/jquery.fileupload-ui.js new file mode 100644 index 0000000..5d22346 --- /dev/null +++ b/files_widget/static/files_widget/js/jquery.fileupload-ui.js @@ -0,0 +1,633 @@ +/* + * jQuery File Upload User Interface Plugin 8.2.1 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2010, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/MIT + */ + +/*jslint nomen: true, unparam: true, regexp: true */ +/*global define, window, URL, webkitURL, FileReader */ + +(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define([ + 'jquery', + 'tmpl', + './jquery.fileupload-resize', + './jquery.fileupload-validate' + ], factory); + } else { + // Browser globals: + factory( + window.jQuery, + window.tmpl + ); + } +}(function ($, tmpl, 
loadImage) { + 'use strict'; + + $.blueimp.fileupload.prototype._specialOptions.push( + 'filesContainer', + 'uploadTemplateId', + 'downloadTemplateId' + ); + + // The UI version extends the file upload widget + // and adds complete user interface interaction: + $.widget('blueimp.fileupload', $.blueimp.fileupload, { + + options: { + // By default, files added to the widget are uploaded as soon + // as the user clicks on the start buttons. To enable automatic + // uploads, set the following option to true: + autoUpload: false, + // The ID of the upload template: + uploadTemplateId: 'template-upload', + // The ID of the download template: + downloadTemplateId: 'template-download', + // The container for the list of files. If undefined, it is set to + // an element with class "files" inside of the widget element: + filesContainer: undefined, + // By default, files are appended to the files container. + // Set the following option to true, to prepend files instead: + prependFiles: false, + // The expected data type of the upload response, sets the dataType + // option of the $.ajax upload requests: + dataType: 'json', + + // Function returning the current number of files, + // used by the maxNumberOfFiles validation: + getNumberOfFiles: function () { + return this.filesContainer.children().length; + }, + + // Callback to retrieve the list of files from the server response: + getFilesFromResponse: function (data) { + if (data.result && $.isArray(data.result.files)) { + return data.result.files; + } + return []; + }, + + // The add callback is invoked as soon as files are added to the fileupload + // widget (via file input selection, drag & drop or add API call). 
+ // See the basic file upload widget for more information: + add: function (e, data) { + var $this = $(this), + that = $this.data('blueimp-fileupload') || + $this.data('fileupload'), + options = that.options, + files = data.files; + data.process(function () { + return $this.fileupload('process', data); + }).always(function () { + data.context = that._renderUpload(files).data('data', data); + that._renderPreviews(data); + options.filesContainer[ + options.prependFiles ? 'prepend' : 'append' + ](data.context); + that._forceReflow(data.context); + that._transition(data.context).done( + function () { + if ((that._trigger('added', e, data) !== false) && + (options.autoUpload || data.autoUpload) && + data.autoUpload !== false && !data.files.error) { + data.submit(); + } + } + ); + }); + }, + // Callback for the start of each file upload request: + send: function (e, data) { + var that = $(this).data('blueimp-fileupload') || + $(this).data('fileupload'); + if (data.context && data.dataType && + data.dataType.substr(0, 6) === 'iframe') { + // Iframe Transport does not support progress events. 
+ // In lack of an indeterminate progress bar, we set + // the progress to 100%, showing the full animated bar: + data.context + .find('.progress').addClass( + !$.support.transition && 'progress-animated' + ) + .attr('aria-valuenow', 100) + .find('.bar').css( + 'width', + '100%' + ); + } + return that._trigger('sent', e, data); + }, + // Callback for successful uploads: + done: function (e, data) { + var that = $(this).data('blueimp-fileupload') || + $(this).data('fileupload'), + getFilesFromResponse = data.getFilesFromResponse || + that.options.getFilesFromResponse, + files = getFilesFromResponse(data), + template, + deferred; + if (data.context) { + data.context.each(function (index) { + var file = files[index] || + {error: 'Empty file upload result'}, + deferred = that._addFinishedDeferreds(); + that._transition($(this)).done( + function () { + var node = $(this); + template = that._renderDownload([file]) + .replaceAll(node); + that._forceReflow(template); + that._transition(template).done( + function () { + data.context = $(this); + that._trigger('completed', e, data); + that._trigger('finished', e, data); + deferred.resolve(); + } + ); + } + ); + }); + } else { + template = that._renderDownload(files) + .appendTo(that.options.filesContainer); + that._forceReflow(template); + deferred = that._addFinishedDeferreds(); + that._transition(template).done( + function () { + data.context = $(this); + that._trigger('completed', e, data); + that._trigger('finished', e, data); + deferred.resolve(); + } + ); + } + }, + // Callback for failed (abort or error) uploads: + fail: function (e, data) { + var that = $(this).data('blueimp-fileupload') || + $(this).data('fileupload'), + template, + deferred; + if (data.context) { + data.context.each(function (index) { + if (data.errorThrown !== 'abort') { + var file = data.files[index]; + file.error = file.error || data.errorThrown || + true; + deferred = that._addFinishedDeferreds(); + that._transition($(this)).done( + function () 
{ + var node = $(this); + template = that._renderDownload([file]) + .replaceAll(node); + that._forceReflow(template); + that._transition(template).done( + function () { + data.context = $(this); + that._trigger('failed', e, data); + that._trigger('finished', e, data); + deferred.resolve(); + } + ); + } + ); + } else { + deferred = that._addFinishedDeferreds(); + that._transition($(this)).done( + function () { + $(this).remove(); + that._trigger('failed', e, data); + that._trigger('finished', e, data); + deferred.resolve(); + } + ); + } + }); + } else if (data.errorThrown !== 'abort') { + data.context = that._renderUpload(data.files) + .appendTo(that.options.filesContainer) + .data('data', data); + that._forceReflow(data.context); + deferred = that._addFinishedDeferreds(); + that._transition(data.context).done( + function () { + data.context = $(this); + that._trigger('failed', e, data); + that._trigger('finished', e, data); + deferred.resolve(); + } + ); + } else { + that._trigger('failed', e, data); + that._trigger('finished', e, data); + that._addFinishedDeferreds().resolve(); + } + }, + // Callback for upload progress events: + progress: function (e, data) { + if (data.context) { + var progress = Math.floor(data.loaded / data.total * 100); + data.context.find('.progress') + .attr('aria-valuenow', progress) + .find('.bar').css( + 'width', + progress + '%' + ); + } + }, + // Callback for global upload progress events: + progressall: function (e, data) { + var $this = $(this), + progress = Math.floor(data.loaded / data.total * 100), + globalProgressNode = $this.find('.fileupload-progress'), + extendedProgressNode = globalProgressNode + .find('.progress-extended'); + if (extendedProgressNode.length) { + extendedProgressNode.html( + ($this.data('blueimp-fileupload') || $this.data('fileupload')) + ._renderExtendedProgress(data) + ); + } + globalProgressNode + .find('.progress') + .attr('aria-valuenow', progress) + .find('.bar').css( + 'width', + progress + '%' + ); + 
}, + // Callback for uploads start, equivalent to the global ajaxStart event: + start: function (e) { + var that = $(this).data('blueimp-fileupload') || + $(this).data('fileupload'); + that._resetFinishedDeferreds(); + that._transition($(this).find('.fileupload-progress')).done( + function () { + that._trigger('started', e); + } + ); + }, + // Callback for uploads stop, equivalent to the global ajaxStop event: + stop: function (e) { + var that = $(this).data('blueimp-fileupload') || + $(this).data('fileupload'), + deferred = that._addFinishedDeferreds(); + $.when.apply($, that._getFinishedDeferreds()) + .done(function () { + that._trigger('stopped', e); + }); + that._transition($(this).find('.fileupload-progress')).done( + function () { + $(this).find('.progress') + .attr('aria-valuenow', '0') + .find('.bar').css('width', '0%'); + $(this).find('.progress-extended').html(' '); + deferred.resolve(); + } + ); + }, + processstart: function () { + $(this).addClass('fileupload-processing'); + }, + processstop: function () { + $(this).removeClass('fileupload-processing'); + }, + // Callback for file deletion: + destroy: function (e, data) { + var that = $(this).data('blueimp-fileupload') || + $(this).data('fileupload'); + if (data.url) { + $.ajax(data).done(function () { + that._transition(data.context).done( + function () { + $(this).remove(); + that._trigger('destroyed', e, data); + } + ); + }); + } + } + }, + + _resetFinishedDeferreds: function () { + this._finishedUploads = []; + }, + + _addFinishedDeferreds: function (deferred) { + if (!deferred) { + deferred = $.Deferred(); + } + this._finishedUploads.push(deferred); + return deferred; + }, + + _getFinishedDeferreds: function () { + return this._finishedUploads; + }, + + // Link handler, that allows to download files + // by drag & drop of the links to the desktop: + _enableDragToDesktop: function () { + var link = $(this), + url = link.prop('href'), + name = link.prop('download'), + type = 
'application/octet-stream'; + link.bind('dragstart', function (e) { + try { + e.originalEvent.dataTransfer.setData( + 'DownloadURL', + [type, name, url].join(':') + ); + } catch (ignore) {} + }); + }, + + _formatFileSize: function (bytes) { + if (typeof bytes !== 'number') { + return ''; + } + if (bytes >= 1000000000) { + return (bytes / 1000000000).toFixed(2) + ' GB'; + } + if (bytes >= 1000000) { + return (bytes / 1000000).toFixed(2) + ' MB'; + } + return (bytes / 1000).toFixed(2) + ' KB'; + }, + + _formatBitrate: function (bits) { + if (typeof bits !== 'number') { + return ''; + } + if (bits >= 1000000000) { + return (bits / 1000000000).toFixed(2) + ' Gbit/s'; + } + if (bits >= 1000000) { + return (bits / 1000000).toFixed(2) + ' Mbit/s'; + } + if (bits >= 1000) { + return (bits / 1000).toFixed(2) + ' kbit/s'; + } + return bits.toFixed(2) + ' bit/s'; + }, + + _formatTime: function (seconds) { + var date = new Date(seconds * 1000), + days = Math.floor(seconds / 86400); + days = days ? days + 'd ' : ''; + return days + + ('0' + date.getUTCHours()).slice(-2) + ':' + + ('0' + date.getUTCMinutes()).slice(-2) + ':' + + ('0' + date.getUTCSeconds()).slice(-2); + }, + + _formatPercentage: function (floatValue) { + return (floatValue * 100).toFixed(2) + ' %'; + }, + + _renderExtendedProgress: function (data) { + return this._formatBitrate(data.bitrate) + ' | ' + + this._formatTime( + (data.total - data.loaded) * 8 / data.bitrate + ) + ' | ' + + this._formatPercentage( + data.loaded / data.total + ) + ' | ' + + this._formatFileSize(data.loaded) + ' / ' + + this._formatFileSize(data.total); + }, + + _renderTemplate: function (func, files) { + if (!func) { + return $(); + } + var result = func({ + files: files, + formatFileSize: this._formatFileSize, + options: this.options + }); + if (result instanceof $) { + return result; + } + return $(this.options.templatesContainer).html(result).children(); + }, + + _renderPreviews: function (data) { + 
data.context.find('.preview').each(function (index, elm) { + $(elm).append(data.files[index].preview); + }); + }, + + _renderUpload: function (files) { + return this._renderTemplate( + this.options.uploadTemplate, + files + ); + }, + + _renderDownload: function (files) { + return this._renderTemplate( + this.options.downloadTemplate, + files + ).find('a[download]').each(this._enableDragToDesktop).end(); + }, + + _startHandler: function (e) { + e.preventDefault(); + var button = $(e.currentTarget), + template = button.closest('.template-upload'), + data = template.data('data'); + if (data && data.submit && !data.jqXHR && data.submit()) { + button.prop('disabled', true); + } + }, + + _cancelHandler: function (e) { + e.preventDefault(); + var template = $(e.currentTarget).closest('.template-upload'), + data = template.data('data') || {}; + if (!data.jqXHR) { + data.errorThrown = 'abort'; + this._trigger('fail', e, data); + } else { + data.jqXHR.abort(); + } + }, + + _deleteHandler: function (e) { + e.preventDefault(); + var button = $(e.currentTarget); + this._trigger('destroy', e, $.extend({ + context: button.closest('.template-download'), + type: 'DELETE' + }, button.data())); + }, + + _forceReflow: function (node) { + return $.support.transition && node.length && + node[0].offsetWidth; + }, + + _transition: function (node) { + var dfd = $.Deferred(); + if ($.support.transition && node.hasClass('fade') && node.is(':visible')) { + node.bind( + $.support.transition.end, + function (e) { + // Make sure we don't respond to other transitions events + // in the container element, e.g. 
from button elements: + if (e.target === node[0]) { + node.unbind($.support.transition.end); + dfd.resolveWith(node); + } + } + ).toggleClass('in'); + } else { + node.toggleClass('in'); + dfd.resolveWith(node); + } + return dfd; + }, + + _initButtonBarEventHandlers: function () { + var fileUploadButtonBar = this.element.find('.fileupload-buttonbar'), + filesList = this.options.filesContainer; + this._on(fileUploadButtonBar.find('.start'), { + click: function (e) { + e.preventDefault(); + filesList.find('.start').click(); + } + }); + this._on(fileUploadButtonBar.find('.cancel'), { + click: function (e) { + e.preventDefault(); + filesList.find('.cancel').click(); + } + }); + this._on(fileUploadButtonBar.find('.delete'), { + click: function (e) { + e.preventDefault(); + filesList.find('.toggle:checked') + .closest('.template-download') + .find('.delete').click(); + fileUploadButtonBar.find('.toggle') + .prop('checked', false); + } + }); + this._on(fileUploadButtonBar.find('.toggle'), { + change: function (e) { + filesList.find('.toggle').prop( + 'checked', + $(e.currentTarget).is(':checked') + ); + } + }); + }, + + _destroyButtonBarEventHandlers: function () { + this._off( + this.element.find('.fileupload-buttonbar') + .find('.start, .cancel, .delete'), + 'click' + ); + this._off( + this.element.find('.fileupload-buttonbar .toggle'), + 'change.' 
+ ); + }, + + _initEventHandlers: function () { + this._super(); + this._on(this.options.filesContainer, { + 'click .start': this._startHandler, + 'click .cancel': this._cancelHandler, + 'click .delete': this._deleteHandler + }); + this._initButtonBarEventHandlers(); + }, + + _destroyEventHandlers: function () { + this._destroyButtonBarEventHandlers(); + this._off(this.options.filesContainer, 'click'); + this._super(); + }, + + _enableFileInputButton: function () { + this.element.find('.fileinput-button input') + .prop('disabled', false) + .parent().removeClass('disabled'); + }, + + _disableFileInputButton: function () { + this.element.find('.fileinput-button input') + .prop('disabled', true) + .parent().addClass('disabled'); + }, + + _initTemplates: function () { + var options = this.options; + options.templatesContainer = this.document[0].createElement( + options.filesContainer.prop('nodeName') + ); + if (tmpl) { + if (options.uploadTemplateId) { + options.uploadTemplate = tmpl(options.uploadTemplateId); + } + if (options.downloadTemplateId) { + options.downloadTemplate = tmpl(options.downloadTemplateId); + } + } + }, + + _initFilesContainer: function () { + var options = this.options; + if (options.filesContainer === undefined) { + options.filesContainer = this.element.find('.files'); + } else if (!(options.filesContainer instanceof $)) { + options.filesContainer = $(options.filesContainer); + } + }, + + _initSpecialOptions: function () { + this._super(); + this._initFilesContainer(); + this._initTemplates(); + }, + + _create: function () { + this._super(); + this._resetFinishedDeferreds(); + }, + + enable: function () { + var wasDisabled = false; + if (this.options.disabled) { + wasDisabled = true; + } + this._super(); + if (wasDisabled) { + this.element.find('input, button').prop('disabled', false); + this._enableFileInputButton(); + } + }, + + disable: function () { + if (!this.options.disabled) { + this.element.find('input, button').prop('disabled', true); + 
this._disableFileInputButton(); + } + this._super(); + } + + }); + +})); diff --git a/files_widget/static/files_widget/js/jquery.fileupload-validate.js b/files_widget/static/files_widget/js/jquery.fileupload-validate.js new file mode 100644 index 0000000..2599da8 --- /dev/null +++ b/files_widget/static/files_widget/js/jquery.fileupload-validate.js @@ -0,0 +1,116 @@ +/* + * jQuery File Upload Validation Plugin 1.0.2 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2013, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/MIT + */ + +/*jslint nomen: true, unparam: true, regexp: true */ +/*global define, window */ + +(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define([ + 'jquery', + './jquery.fileupload-process' + ], factory); + } else { + // Browser globals: + factory( + window.jQuery + ); + } +}(function ($) { + 'use strict'; + + // Append to the default processQueue: + $.blueimp.fileupload.prototype.options.processQueue.push( + { + action: 'validate', + // Always trigger this action, + // even if the previous action was rejected: + always: true, + // Options taken from the global options map: + acceptFileTypes: '@acceptFileTypes', + maxFileSize: '@maxFileSize', + minFileSize: '@minFileSize', + maxNumberOfFiles: '@maxNumberOfFiles', + disabled: '@disableValidation' + } + ); + + // The File Upload Validation plugin extends the fileupload widget + // with file validation functionality: + $.widget('blueimp.fileupload', $.blueimp.fileupload, { + + options: { + /* + // The regular expression for allowed file types, matches + // against either file type or file name: + acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i, + // The maximum allowed file size in bytes: + maxFileSize: 10000000, // 10 MB + // The minimum allowed file size in bytes: + minFileSize: undefined, // No minimal file size + // The limit 
of files to be uploaded: + maxNumberOfFiles: 10, + */ + + // Function returning the current number of files, + // has to be overriden for maxNumberOfFiles validation: + getNumberOfFiles: $.noop, + + // Error and info messages: + messages: { + maxNumberOfFiles: 'Maximum number of files exceeded', + acceptFileTypes: 'File type not allowed', + maxFileSize: 'File is too large', + minFileSize: 'File is too small' + } + }, + + processActions: { + + validate: function (data, options) { + if (options.disabled) { + return data; + } + var dfd = $.Deferred(), + settings = this.options, + file = data.files[data.index], + numberOfFiles = settings.getNumberOfFiles(); + if (numberOfFiles && $.type(options.maxNumberOfFiles) === 'number' && + numberOfFiles + data.files.length > options.maxNumberOfFiles) { + file.error = settings.i18n('maxNumberOfFiles'); + } else if (options.acceptFileTypes && + !(options.acceptFileTypes.test(file.type) || + options.acceptFileTypes.test(file.name))) { + file.error = settings.i18n('acceptFileTypes'); + } else if (options.maxFileSize && file.size > options.maxFileSize) { + file.error = settings.i18n('maxFileSize'); + } else if ($.type(file.size) === 'number' && + file.size < options.minFileSize) { + file.error = settings.i18n('minFileSize'); + } else { + delete file.error; + } + if (file.error || data.files.error) { + data.files.error = true; + dfd.rejectWith(this, [data]); + } else { + dfd.resolveWith(this, [data]); + } + return dfd.promise(); + } + + } + + }); + +})); diff --git a/files_widget/static/files_widget/js/jquery.fileupload.js b/files_widget/static/files_widget/js/jquery.fileupload.js new file mode 100644 index 0000000..6956da5 --- /dev/null +++ b/files_widget/static/files_widget/js/jquery.fileupload.js @@ -0,0 +1,1301 @@ +/* + * jQuery File Upload Plugin 5.31.2 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2010, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * 
http://www.opensource.org/licenses/MIT + */ + +/*jslint nomen: true, unparam: true, regexp: true */ +/*global define, window, document, File, Blob, FormData, location */ + +(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define([ + 'jquery', + 'jquery.ui.widget' + ], factory); + } else { + // Browser globals: + factory(window.jQuery); + } +}(function ($) { + 'use strict'; + + // The FileReader API is not actually used, but works as feature detection, + // as e.g. Safari supports XHR file uploads via the FormData API, + // but not non-multipart XHR file uploads: + $.support.xhrFileUpload = !!(window.XMLHttpRequestUpload && window.FileReader); + $.support.xhrFormDataFileUpload = !!window.FormData; + + // The fileupload widget listens for change events on file input fields defined + // via fileInput setting and paste or drop events of the given dropZone. + // In addition to the default jQuery Widget methods, the fileupload widget + // exposes the "add" and "send" methods, to add or directly send files using + // the fileupload API. + // By default, files added via file input selection, paste, drag & drop or + // "add" method are uploaded immediately, but it is possible to override + // the "add" callback option to queue file uploads. + $.widget('blueimp.fileupload', { + + options: { + // The drop target element(s), by the default the complete document. + // Set to null to disable drag & drop support: + dropZone: $(document), + // The paste target element(s), by the default the complete document. + // Set to null to disable paste support: + pasteZone: $(document), + // The file input field(s), that are listened to for change events. + // If undefined, it is set to the file input fields inside + // of the widget element on plugin initialization. + // Set to null to disable the change listener. 
+ fileInput: undefined, + // By default, the file input field is replaced with a clone after + // each input field change event. This is required for iframe transport + // queues and allows change events to be fired for the same file + // selection, but can be disabled by setting the following option to false: + replaceFileInput: true, + // The parameter name for the file form data (the request argument name). + // If undefined or empty, the name property of the file input field is + // used, or "files[]" if the file input name property is also empty, + // can be a string or an array of strings: + paramName: undefined, + // By default, each file of a selection is uploaded using an individual + // request for XHR type uploads. Set to false to upload file + // selections in one request each: + singleFileUploads: true, + // To limit the number of files uploaded with one XHR request, + // set the following option to an integer greater than 0: + limitMultiFileUploads: undefined, + // Set the following option to true to issue all file upload requests + // in a sequential order: + sequentialUploads: false, + // To limit the number of concurrent uploads, + // set the following option to an integer greater than 0: + limitConcurrentUploads: undefined, + // Set the following option to true to force iframe transport uploads: + forceIframeTransport: false, + // Set the following option to the location of a redirect url on the + // origin server, for cross-domain iframe transport uploads: + redirect: undefined, + // The parameter name for the redirect url, sent as part of the form + // data and set to 'redirect' if this option is empty: + redirectParamName: undefined, + // Set the following option to the location of a postMessage window, + // to enable postMessage transport uploads: + postMessage: undefined, + // By default, XHR file uploads are sent as multipart/form-data. + // The iframe transport is always using multipart/form-data. 
+ // Set to false to enable non-multipart XHR uploads: + multipart: true, + // To upload large files in smaller chunks, set the following option + // to a preferred maximum chunk size. If set to 0, null or undefined, + // or the browser does not support the required Blob API, files will + // be uploaded as a whole. + maxChunkSize: undefined, + // When a non-multipart upload or a chunked multipart upload has been + // aborted, this option can be used to resume the upload by setting + // it to the size of the already uploaded bytes. This option is most + // useful when modifying the options object inside of the "add" or + // "send" callbacks, as the options are cloned for each file upload. + uploadedBytes: undefined, + // By default, failed (abort or error) file uploads are removed from the + // global progress calculation. Set the following option to false to + // prevent recalculating the global progress data: + recalculateProgress: true, + // Interval in milliseconds to calculate and trigger progress events: + progressInterval: 100, + // Interval in milliseconds to calculate progress bitrate: + bitrateInterval: 500, + // By default, uploads are started automatically when adding files: + autoUpload: true, + + // Error and info messages: + messages: { + uploadedBytes: 'Uploaded bytes exceed file size' + }, + + // Translation function, gets the message key to be translated + // and an object with context specific data as arguments: + i18n: function (message, context) { + message = this.messages[message] || message.toString(); + if (context) { + $.each(context, function (key, value) { + message = message.replace('{' + key + '}', value); + }); + } + return message; + }, + + // Additional form data to be sent along with the file uploads can be set + // using this option, which accepts an array of objects with name and + // value properties, a function returning such an array, a FormData + // object (for XHR file uploads), or a simple object. 
+ // The form of the first fileInput is given as parameter to the function: + formData: function (form) { + return form.serializeArray(); + }, + + // The add callback is invoked as soon as files are added to the fileupload + // widget (via file input selection, drag & drop, paste or add API call). + // If the singleFileUploads option is enabled, this callback will be + // called once for each file in the selection for XHR file uplaods, else + // once for each file selection. + // The upload starts when the submit method is invoked on the data parameter. + // The data object contains a files property holding the added files + // and allows to override plugin options as well as define ajax settings. + // Listeners for this callback can also be bound the following way: + // .bind('fileuploadadd', func); + // data.submit() returns a Promise object and allows to attach additional + // handlers using jQuery's Deferred callbacks: + // data.submit().done(func).fail(func).always(func); + add: function (e, data) { + if (data.autoUpload || (data.autoUpload !== false && + $(this).fileupload('option', 'autoUpload'))) { + data.process().done(function () { + data.submit(); + }); + } + }, + + // Other callbacks: + + // Callback for the submit event of each file upload: + // submit: function (e, data) {}, // .bind('fileuploadsubmit', func); + + // Callback for the start of each file upload request: + // send: function (e, data) {}, // .bind('fileuploadsend', func); + + // Callback for successful uploads: + // done: function (e, data) {}, // .bind('fileuploaddone', func); + + // Callback for failed (abort or error) uploads: + // fail: function (e, data) {}, // .bind('fileuploadfail', func); + + // Callback for completed (success, abort or error) requests: + // always: function (e, data) {}, // .bind('fileuploadalways', func); + + // Callback for upload progress events: + // progress: function (e, data) {}, // .bind('fileuploadprogress', func); + + // Callback for global upload 
progress events: + // progressall: function (e, data) {}, // .bind('fileuploadprogressall', func); + + // Callback for uploads start, equivalent to the global ajaxStart event: + // start: function (e) {}, // .bind('fileuploadstart', func); + + // Callback for uploads stop, equivalent to the global ajaxStop event: + // stop: function (e) {}, // .bind('fileuploadstop', func); + + // Callback for change events of the fileInput(s): + // change: function (e, data) {}, // .bind('fileuploadchange', func); + + // Callback for paste events to the pasteZone(s): + // paste: function (e, data) {}, // .bind('fileuploadpaste', func); + + // Callback for drop events of the dropZone(s): + // drop: function (e, data) {}, // .bind('fileuploaddrop', func); + + // Callback for dragover events of the dropZone(s): + // dragover: function (e) {}, // .bind('fileuploaddragover', func); + + // Callback for the start of each chunk upload request: + // chunksend: function (e, data) {}, // .bind('fileuploadchunksend', func); + + // Callback for successful chunk uploads: + // chunkdone: function (e, data) {}, // .bind('fileuploadchunkdone', func); + + // Callback for failed (abort or error) chunk uploads: + // chunkfail: function (e, data) {}, // .bind('fileuploadchunkfail', func); + + // Callback for completed (success, abort or error) chunk upload requests: + // chunkalways: function (e, data) {}, // .bind('fileuploadchunkalways', func); + + // The plugin options are used as settings object for the ajax calls. + // The following are jQuery ajax settings required for the file uploads: + processData: false, + contentType: false, + cache: false + }, + + // A list of options that require reinitializing event listeners and/or + // special initialization code: + _specialOptions: [ + 'fileInput', + 'dropZone', + 'pasteZone', + 'multipart', + 'forceIframeTransport' + ], + + _BitrateTimer: function () { + this.timestamp = ((Date.now) ? 
Date.now() : (new Date()).getTime()); + this.loaded = 0; + this.bitrate = 0; + this.getBitrate = function (now, loaded, interval) { + var timeDiff = now - this.timestamp; + if (!this.bitrate || !interval || timeDiff > interval) { + this.bitrate = (loaded - this.loaded) * (1000 / timeDiff) * 8; + this.loaded = loaded; + this.timestamp = now; + } + return this.bitrate; + }; + }, + + _isXHRUpload: function (options) { + return !options.forceIframeTransport && + ((!options.multipart && $.support.xhrFileUpload) || + $.support.xhrFormDataFileUpload); + }, + + _getFormData: function (options) { + var formData; + if (typeof options.formData === 'function') { + return options.formData(options.form); + } + if ($.isArray(options.formData)) { + return options.formData; + } + if ($.type(options.formData) === 'object') { + formData = []; + $.each(options.formData, function (name, value) { + formData.push({name: name, value: value}); + }); + return formData; + } + return []; + }, + + _getTotal: function (files) { + var total = 0; + $.each(files, function (index, file) { + total += file.size || 1; + }); + return total; + }, + + _initProgressObject: function (obj) { + var progress = { + loaded: 0, + total: 0, + bitrate: 0 + }; + if (obj._progress) { + $.extend(obj._progress, progress); + } else { + obj._progress = progress; + } + }, + + _initResponseObject: function (obj) { + var prop; + if (obj._response) { + for (prop in obj._response) { + if (obj._response.hasOwnProperty(prop)) { + delete obj._response[prop]; + } + } + } else { + obj._response = {}; + } + }, + + _onProgress: function (e, data) { + if (e.lengthComputable) { + var now = ((Date.now) ? 
Date.now() : (new Date()).getTime()), + loaded; + if (data._time && data.progressInterval && + (now - data._time < data.progressInterval) && + e.loaded !== e.total) { + return; + } + data._time = now; + loaded = Math.floor( + e.loaded / e.total * (data.chunkSize || data._progress.total) + ) + (data.uploadedBytes || 0); + // Add the difference from the previously loaded state + // to the global loaded counter: + this._progress.loaded += (loaded - data._progress.loaded); + this._progress.bitrate = this._bitrateTimer.getBitrate( + now, + this._progress.loaded, + data.bitrateInterval + ); + data._progress.loaded = data.loaded = loaded; + data._progress.bitrate = data.bitrate = data._bitrateTimer.getBitrate( + now, + loaded, + data.bitrateInterval + ); + // Trigger a custom progress event with a total data property set + // to the file size(s) of the current upload and a loaded data + // property calculated accordingly: + this._trigger('progress', e, data); + // Trigger a global progress event for all current file uploads, + // including ajax calls queued for sequential file uploads: + this._trigger('progressall', e, this._progress); + } + }, + + _initProgressListener: function (options) { + var that = this, + xhr = options.xhr ? 
options.xhr() : $.ajaxSettings.xhr(); + // Access to the native XHR object is required to add event listeners + // for the upload progress event: + if (xhr.upload) { + $(xhr.upload).bind('progress', function (e) { + var oe = e.originalEvent; + // Make sure the progress event properties get copied over: + e.lengthComputable = oe.lengthComputable; + e.loaded = oe.loaded; + e.total = oe.total; + that._onProgress(e, options); + }); + options.xhr = function () { + return xhr; + }; + } + }, + + _isInstanceOf: function (type, obj) { + // Cross-frame instanceof check + return Object.prototype.toString.call(obj) === '[object ' + type + ']'; + }, + + _initXHRData: function (options) { + var that = this, + formData, + file = options.files[0], + // Ignore non-multipart setting if not supported: + multipart = options.multipart || !$.support.xhrFileUpload, + paramName = options.paramName[0]; + options.headers = options.headers || {}; + if (options.contentRange) { + options.headers['Content-Range'] = options.contentRange; + } + if (!multipart) { + options.headers['Content-Disposition'] = 'attachment; filename="' + + encodeURI(file.name) + '"'; + options.contentType = file.type; + options.data = options.blob || file; + } else if ($.support.xhrFormDataFileUpload) { + if (options.postMessage) { + // window.postMessage does not allow sending FormData + // objects, so we just add the File/Blob objects to + // the formData array and let the postMessage window + // create the FormData object out of this array: + formData = this._getFormData(options); + if (options.blob) { + formData.push({ + name: paramName, + value: options.blob + }); + } else { + $.each(options.files, function (index, file) { + formData.push({ + name: options.paramName[index] || paramName, + value: file + }); + }); + } + } else { + if (that._isInstanceOf('FormData', options.formData)) { + formData = options.formData; + } else { + formData = new FormData(); + $.each(this._getFormData(options), function (index, field) 
{ + formData.append(field.name, field.value); + }); + } + if (options.blob) { + options.headers['Content-Disposition'] = 'attachment; filename="' + + encodeURI(file.name) + '"'; + formData.append(paramName, options.blob, file.name); + } else { + $.each(options.files, function (index, file) { + // This check allows the tests to run with + // dummy objects: + if (that._isInstanceOf('File', file) || + that._isInstanceOf('Blob', file)) { + formData.append( + options.paramName[index] || paramName, + file, + file.name + ); + } + }); + } + } + options.data = formData; + } + // Blob reference is not needed anymore, free memory: + options.blob = null; + }, + + _initIframeSettings: function (options) { + var targetHost = $('').prop('href', options.url).prop('host'); + // Setting the dataType to iframe enables the iframe transport: + options.dataType = 'iframe ' + (options.dataType || ''); + // The iframe transport accepts a serialized array as form data: + options.formData = this._getFormData(options); + // Add redirect url to form data on cross-domain uploads: + if (options.redirect && targetHost && targetHost !== location.host) { + options.formData.push({ + name: options.redirectParamName || 'redirect', + value: options.redirect + }); + } + }, + + _initDataSettings: function (options) { + if (this._isXHRUpload(options)) { + if (!this._chunkedUpload(options, true)) { + if (!options.data) { + this._initXHRData(options); + } + this._initProgressListener(options); + } + if (options.postMessage) { + // Setting the dataType to postmessage enables the + // postMessage transport: + options.dataType = 'postmessage ' + (options.dataType || ''); + } + } else { + this._initIframeSettings(options); + } + }, + + _getParamName: function (options) { + var fileInput = $(options.fileInput), + paramName = options.paramName; + if (!paramName) { + paramName = []; + fileInput.each(function () { + var input = $(this), + name = input.prop('name') || 'files[]', + i = (input.prop('files') || 
[1]).length; + while (i) { + paramName.push(name); + i -= 1; + } + }); + if (!paramName.length) { + paramName = [fileInput.prop('name') || 'files[]']; + } + } else if (!$.isArray(paramName)) { + paramName = [paramName]; + } + return paramName; + }, + + _initFormSettings: function (options) { + // Retrieve missing options from the input field and the + // associated form, if available: + if (!options.form || !options.form.length) { + options.form = $(options.fileInput.prop('form')); + // If the given file input doesn't have an associated form, + // use the default widget file input's form: + if (!options.form.length) { + options.form = $(this.options.fileInput.prop('form')); + } + } + options.paramName = this._getParamName(options); + if (!options.url) { + options.url = options.form.prop('action') || location.href; + } + // The HTTP request method must be "POST" or "PUT": + options.type = (options.type || options.form.prop('method') || '') + .toUpperCase(); + if (options.type !== 'POST' && options.type !== 'PUT' && + options.type !== 'PATCH') { + options.type = 'POST'; + } + if (!options.formAcceptCharset) { + options.formAcceptCharset = options.form.attr('accept-charset'); + } + }, + + _getAJAXSettings: function (data) { + var options = $.extend({}, this.options, data); + this._initFormSettings(options); + this._initDataSettings(options); + return options; + }, + + // jQuery 1.6 doesn't provide .state(), + // while jQuery 1.8+ removed .isRejected() and .isResolved(): + _getDeferredState: function (deferred) { + if (deferred.state) { + return deferred.state(); + } + if (deferred.isResolved()) { + return 'resolved'; + } + if (deferred.isRejected()) { + return 'rejected'; + } + return 'pending'; + }, + + // Maps jqXHR callbacks to the equivalent + // methods of the given Promise object: + _enhancePromise: function (promise) { + promise.success = promise.done; + promise.error = promise.fail; + promise.complete = promise.always; + return promise; + }, + + // Creates and 
returns a Promise object enhanced with + // the jqXHR methods abort, success, error and complete: + _getXHRPromise: function (resolveOrReject, context, args) { + var dfd = $.Deferred(), + promise = dfd.promise(); + context = context || this.options.context || promise; + if (resolveOrReject === true) { + dfd.resolveWith(context, args); + } else if (resolveOrReject === false) { + dfd.rejectWith(context, args); + } + promise.abort = dfd.promise; + return this._enhancePromise(promise); + }, + + // Adds convenience methods to the data callback argument: + _addConvenienceMethods: function (e, data) { + var that = this, + getPromise = function (data) { + return $.Deferred().resolveWith(that, [data]).promise(); + }; + data.process = function (resolveFunc, rejectFunc) { + if (resolveFunc || rejectFunc) { + data._processQueue = this._processQueue = + (this._processQueue || getPromise(this)) + .pipe(resolveFunc, rejectFunc); + } + return this._processQueue || getPromise(this); + }; + data.submit = function () { + if (this.state() !== 'pending') { + data.jqXHR = this.jqXHR = + (that._trigger('submit', e, this) !== false) && + that._onSend(e, this); + } + return this.jqXHR || that._getXHRPromise(); + }; + data.abort = function () { + if (this.jqXHR) { + return this.jqXHR.abort(); + } + return that._getXHRPromise(); + }; + data.state = function () { + if (this.jqXHR) { + return that._getDeferredState(this.jqXHR); + } + if (this._processQueue) { + return that._getDeferredState(this._processQueue); + } + }; + data.progress = function () { + return this._progress; + }; + data.response = function () { + return this._response; + }; + }, + + // Parses the Range header from the server response + // and returns the uploaded bytes: + _getUploadedBytes: function (jqXHR) { + var range = jqXHR.getResponseHeader('Range'), + parts = range && range.split('-'), + upperBytesPos = parts && parts.length > 1 && + parseInt(parts[1], 10); + return upperBytesPos && upperBytesPos + 1; + }, + + // 
Uploads a file in multiple, sequential requests + // by splitting the file up in multiple blob chunks. + // If the second parameter is true, only tests if the file + // should be uploaded in chunks, but does not invoke any + // upload requests: + _chunkedUpload: function (options, testOnly) { + var that = this, + file = options.files[0], + fs = file.size, + ub = options.uploadedBytes = options.uploadedBytes || 0, + mcs = options.maxChunkSize || fs, + slice = file.slice || file.webkitSlice || file.mozSlice, + dfd = $.Deferred(), + promise = dfd.promise(), + jqXHR, + upload; + if (!(this._isXHRUpload(options) && slice && (ub || mcs < fs)) || + options.data) { + return false; + } + if (testOnly) { + return true; + } + if (ub >= fs) { + file.error = options.i18n('uploadedBytes'); + return this._getXHRPromise( + false, + options.context, + [null, 'error', file.error] + ); + } + // The chunk upload method: + upload = function () { + // Clone the options object for each chunk upload: + var o = $.extend({}, options), + currentLoaded = o._progress.loaded; + o.blob = slice.call( + file, + ub, + ub + mcs, + file.type + ); + // Store the current chunk size, as the blob itself + // will be dereferenced after data processing: + o.chunkSize = o.blob.size; + // Expose the chunk bytes position range: + o.contentRange = 'bytes ' + ub + '-' + + (ub + o.chunkSize - 1) + '/' + fs; + // Process the upload data (the blob and potential form data): + that._initXHRData(o); + // Add progress listeners for this chunk upload: + that._initProgressListener(o); + jqXHR = ((that._trigger('chunksend', null, o) !== false && $.ajax(o)) || + that._getXHRPromise(false, o.context)) + .done(function (result, textStatus, jqXHR) { + ub = that._getUploadedBytes(jqXHR) || + (ub + o.chunkSize); + // Create a progress event if no final progress event + // with loaded equaling total has been triggered + // for this chunk: + if (currentLoaded + o.chunkSize - o._progress.loaded) { + 
that._onProgress($.Event('progress', { + lengthComputable: true, + loaded: ub - o.uploadedBytes, + total: ub - o.uploadedBytes + }), o); + } + options.uploadedBytes = o.uploadedBytes = ub; + o.result = result; + o.textStatus = textStatus; + o.jqXHR = jqXHR; + that._trigger('chunkdone', null, o); + that._trigger('chunkalways', null, o); + if (ub < fs) { + // File upload not yet complete, + // continue with the next chunk: + upload(); + } else { + dfd.resolveWith( + o.context, + [result, textStatus, jqXHR] + ); + } + }) + .fail(function (jqXHR, textStatus, errorThrown) { + o.jqXHR = jqXHR; + o.textStatus = textStatus; + o.errorThrown = errorThrown; + that._trigger('chunkfail', null, o); + that._trigger('chunkalways', null, o); + dfd.rejectWith( + o.context, + [jqXHR, textStatus, errorThrown] + ); + }); + }; + this._enhancePromise(promise); + promise.abort = function () { + return jqXHR.abort(); + }; + upload(); + return promise; + }, + + _beforeSend: function (e, data) { + if (this._active === 0) { + // the start callback is triggered when an upload starts + // and no other uploads are currently running, + // equivalent to the global ajaxStart event: + this._trigger('start'); + // Set timer for global bitrate progress calculation: + this._bitrateTimer = new this._BitrateTimer(); + // Reset the global progress values: + this._progress.loaded = this._progress.total = 0; + this._progress.bitrate = 0; + } + // Make sure the container objects for the .response() and + // .progress() methods on the data object are available + // and reset to their initial state: + this._initResponseObject(data); + this._initProgressObject(data); + data._progress.loaded = data.loaded = data.uploadedBytes || 0; + data._progress.total = data.total = this._getTotal(data.files) || 1; + data._progress.bitrate = data.bitrate = 0; + this._active += 1; + // Initialize the global progress values: + this._progress.loaded += data.loaded; + this._progress.total += data.total; + }, + + _onDone: function 
(result, textStatus, jqXHR, options) { + var total = options._progress.total, + response = options._response; + if (options._progress.loaded < total) { + // Create a progress event if no final progress event + // with loaded equaling total has been triggered: + this._onProgress($.Event('progress', { + lengthComputable: true, + loaded: total, + total: total + }), options); + } + response.result = options.result = result; + response.textStatus = options.textStatus = textStatus; + response.jqXHR = options.jqXHR = jqXHR; + this._trigger('done', null, options); + }, + + _onFail: function (jqXHR, textStatus, errorThrown, options) { + var response = options._response; + if (options.recalculateProgress) { + // Remove the failed (error or abort) file upload from + // the global progress calculation: + this._progress.loaded -= options._progress.loaded; + this._progress.total -= options._progress.total; + } + response.jqXHR = options.jqXHR = jqXHR; + response.textStatus = options.textStatus = textStatus; + response.errorThrown = options.errorThrown = errorThrown; + this._trigger('fail', null, options); + }, + + _onAlways: function (jqXHRorResult, textStatus, jqXHRorError, options) { + // jqXHRorResult, textStatus and jqXHRorError are added to the + // options object via done and fail callbacks + this._trigger('always', null, options); + }, + + _onSend: function (e, data) { + if (!data.submit) { + this._addConvenienceMethods(e, data); + } + var that = this, + jqXHR, + aborted, + slot, + pipe, + options = that._getAJAXSettings(data), + send = function () { + that._sending += 1; + // Set timer for bitrate progress calculation: + options._bitrateTimer = new that._BitrateTimer(); + jqXHR = jqXHR || ( + ((aborted || that._trigger('send', e, options) === false) && + that._getXHRPromise(false, options.context, aborted)) || + that._chunkedUpload(options) || $.ajax(options) + ).done(function (result, textStatus, jqXHR) { + that._onDone(result, textStatus, jqXHR, options); + 
}).fail(function (jqXHR, textStatus, errorThrown) { + that._onFail(jqXHR, textStatus, errorThrown, options); + }).always(function (jqXHRorResult, textStatus, jqXHRorError) { + that._onAlways( + jqXHRorResult, + textStatus, + jqXHRorError, + options + ); + that._sending -= 1; + that._active -= 1; + if (options.limitConcurrentUploads && + options.limitConcurrentUploads > that._sending) { + // Start the next queued upload, + // that has not been aborted: + var nextSlot = that._slots.shift(); + while (nextSlot) { + if (that._getDeferredState(nextSlot) === 'pending') { + nextSlot.resolve(); + break; + } + nextSlot = that._slots.shift(); + } + } + if (that._active === 0) { + // The stop callback is triggered when all uploads have + // been completed, equivalent to the global ajaxStop event: + that._trigger('stop'); + } + }); + return jqXHR; + }; + this._beforeSend(e, options); + if (this.options.sequentialUploads || + (this.options.limitConcurrentUploads && + this.options.limitConcurrentUploads <= this._sending)) { + if (this.options.limitConcurrentUploads > 1) { + slot = $.Deferred(); + this._slots.push(slot); + pipe = slot.pipe(send); + } else { + pipe = (this._sequence = this._sequence.pipe(send, send)); + } + // Return the piped Promise object, enhanced with an abort method, + // which is delegated to the jqXHR object of the current upload, + // and jqXHR callbacks mapped to the equivalent Promise methods: + pipe.abort = function () { + aborted = [undefined, 'abort', 'abort']; + if (!jqXHR) { + if (slot) { + slot.rejectWith(options.context, aborted); + } + return send(); + } + return jqXHR.abort(); + }; + return this._enhancePromise(pipe); + } + return send(); + }, + + _onAdd: function (e, data) { + var that = this, + result = true, + options = $.extend({}, this.options, data), + limit = options.limitMultiFileUploads, + paramName = this._getParamName(options), + paramNameSet, + paramNameSlice, + fileSet, + i; + if (!(options.singleFileUploads || limit) || + 
!this._isXHRUpload(options)) { + fileSet = [data.files]; + paramNameSet = [paramName]; + } else if (!options.singleFileUploads && limit) { + fileSet = []; + paramNameSet = []; + for (i = 0; i < data.files.length; i += limit) { + fileSet.push(data.files.slice(i, i + limit)); + paramNameSlice = paramName.slice(i, i + limit); + if (!paramNameSlice.length) { + paramNameSlice = paramName; + } + paramNameSet.push(paramNameSlice); + } + } else { + paramNameSet = paramName; + } + data.originalFiles = data.files; + $.each(fileSet || data.files, function (index, element) { + var newData = $.extend({}, data); + newData.files = fileSet ? element : [element]; + newData.paramName = paramNameSet[index]; + that._initResponseObject(newData); + that._initProgressObject(newData); + that._addConvenienceMethods(e, newData); + result = that._trigger('add', e, newData); + return result; + }); + return result; + }, + + _replaceFileInput: function (input) { + var inputClone = input.clone(true); + $('
').append(inputClone)[0].reset(); + // Detaching allows to insert the fileInput on another form + // without losing the file input value: + input.after(inputClone).detach(); + // Avoid memory leaks with the detached file input: + $.cleanData(input.unbind('remove')); + // Replace the original file input element in the fileInput + // elements set with the clone, which has been copied including + // event handlers: + this.options.fileInput = this.options.fileInput.map(function (i, el) { + if (el === input[0]) { + return inputClone[0]; + } + return el; + }); + // If the widget has been initialized on the file input itself, + // override this.element with the file input clone: + if (input[0] === this.element[0]) { + this.element = inputClone; + } + }, + + _handleFileTreeEntry: function (entry, path) { + var that = this, + dfd = $.Deferred(), + errorHandler = function (e) { + if (e && !e.entry) { + e.entry = entry; + } + // Since $.when returns immediately if one + // Deferred is rejected, we use resolve instead. 
+ // This allows valid files and invalid items + // to be returned together in one set: + dfd.resolve([e]); + }, + dirReader; + path = path || ''; + if (entry.isFile) { + if (entry._file) { + // Workaround for Chrome bug #149735 + entry._file.relativePath = path; + dfd.resolve(entry._file); + } else { + entry.file(function (file) { + file.relativePath = path; + dfd.resolve(file); + }, errorHandler); + } + } else if (entry.isDirectory) { + dirReader = entry.createReader(); + dirReader.readEntries(function (entries) { + that._handleFileTreeEntries( + entries, + path + entry.name + '/' + ).done(function (files) { + dfd.resolve(files); + }).fail(errorHandler); + }, errorHandler); + } else { + // Return an empty list for file system items + // other than files or directories: + dfd.resolve([]); + } + return dfd.promise(); + }, + + _handleFileTreeEntries: function (entries, path) { + var that = this; + return $.when.apply( + $, + $.map(entries, function (entry) { + return that._handleFileTreeEntry(entry, path); + }) + ).pipe(function () { + return Array.prototype.concat.apply( + [], + arguments + ); + }); + }, + + _getDroppedFiles: function (dataTransfer) { + dataTransfer = dataTransfer || {}; + var items = dataTransfer.items; + if (items && items.length && (items[0].webkitGetAsEntry || + items[0].getAsEntry)) { + return this._handleFileTreeEntries( + $.map(items, function (item) { + var entry; + if (item.webkitGetAsEntry) { + entry = item.webkitGetAsEntry(); + if (entry) { + // Workaround for Chrome bug #149735: + entry._file = item.getAsFile(); + } + return entry; + } + return item.getAsEntry(); + }) + ); + } + return $.Deferred().resolve( + $.makeArray(dataTransfer.files) + ).promise(); + }, + + _getSingleFileInputFiles: function (fileInput) { + fileInput = $(fileInput); + var entries = fileInput.prop('webkitEntries') || + fileInput.prop('entries'), + files, + value; + if (entries && entries.length) { + return this._handleFileTreeEntries(entries); + } + files = 
$.makeArray(fileInput.prop('files')); + if (!files.length) { + value = fileInput.prop('value'); + if (!value) { + return $.Deferred().resolve([]).promise(); + } + // If the files property is not available, the browser does not + // support the File API and we add a pseudo File object with + // the input value as name with path information removed: + files = [{name: value.replace(/^.*\\/, '')}]; + } else if (files[0].name === undefined && files[0].fileName) { + // File normalization for Safari 4 and Firefox 3: + $.each(files, function (index, file) { + file.name = file.fileName; + file.size = file.fileSize; + }); + } + return $.Deferred().resolve(files).promise(); + }, + + _getFileInputFiles: function (fileInput) { + if (!(fileInput instanceof $) || fileInput.length === 1) { + return this._getSingleFileInputFiles(fileInput); + } + return $.when.apply( + $, + $.map(fileInput, this._getSingleFileInputFiles) + ).pipe(function () { + return Array.prototype.concat.apply( + [], + arguments + ); + }); + }, + + _onChange: function (e) { + var that = this, + data = { + fileInput: $(e.target), + form: $(e.target.form) + }; + this._getFileInputFiles(data.fileInput).always(function (files) { + data.files = files; + if (that.options.replaceFileInput) { + that._replaceFileInput(data.fileInput); + } + if (that._trigger('change', e, data) !== false) { + that._onAdd(e, data); + } + }); + }, + + _onPaste: function (e) { + var items = e.originalEvent && e.originalEvent.clipboardData && + e.originalEvent.clipboardData.items, + data = {files: []}; + if (items && items.length) { + $.each(items, function (index, item) { + var file = item.getAsFile && item.getAsFile(); + if (file) { + data.files.push(file); + } + }); + if (this._trigger('paste', e, data) === false || + this._onAdd(e, data) === false) { + return false; + } + } + }, + + _onDrop: function (e) { + var that = this, + dataTransfer = e.dataTransfer = e.originalEvent && + e.originalEvent.dataTransfer, + data = {}; + if 
(dataTransfer && dataTransfer.files && dataTransfer.files.length) { + e.preventDefault(); + this._getDroppedFiles(dataTransfer).always(function (files) { + data.files = files; + if (that._trigger('drop', e, data) !== false) { + that._onAdd(e, data); + } + }); + } + }, + + _onDragOver: function (e) { + var dataTransfer = e.dataTransfer = e.originalEvent && + e.originalEvent.dataTransfer; + if (dataTransfer) { + if (this._trigger('dragover', e) === false) { + return false; + } + if ($.inArray('Files', dataTransfer.types) !== -1) { + dataTransfer.dropEffect = 'copy'; + e.preventDefault(); + } + } + }, + + _initEventHandlers: function () { + if (this._isXHRUpload(this.options)) { + this._on(this.options.dropZone, { + dragover: this._onDragOver, + drop: this._onDrop + }); + this._on(this.options.pasteZone, { + paste: this._onPaste + }); + } + this._on(this.options.fileInput, { + change: this._onChange + }); + }, + + _destroyEventHandlers: function () { + this._off(this.options.dropZone, 'dragover drop'); + this._off(this.options.pasteZone, 'paste'); + this._off(this.options.fileInput, 'change'); + }, + + _setOption: function (key, value) { + var reinit = $.inArray(key, this._specialOptions) !== -1; + if (reinit) { + this._destroyEventHandlers(); + } + this._super(key, value); + if (reinit) { + this._initSpecialOptions(); + this._initEventHandlers(); + } + }, + + _initSpecialOptions: function () { + var options = this.options; + if (options.fileInput === undefined) { + options.fileInput = this.element.is('input[type="file"]') ? 
+ this.element : this.element.find('input[type="file"]'); + } else if (!(options.fileInput instanceof $)) { + options.fileInput = $(options.fileInput); + } + if (!(options.dropZone instanceof $)) { + options.dropZone = $(options.dropZone); + } + if (!(options.pasteZone instanceof $)) { + options.pasteZone = $(options.pasteZone); + } + }, + + _getRegExp: function (str) { + var parts = str.split('/'), + modifiers = parts.pop(); + parts.shift(); + return new RegExp(parts.join('/'), modifiers); + }, + + _isRegExpOption: function (key, value) { + return key !== 'url' && $.type(value) === 'string' && + /^\/.*\/[igm]{0,3}$/.test(value); + }, + + _initDataAttributes: function () { + var that = this, + options = this.options; + // Initialize options set via HTML5 data-attributes: + $.each( + $(this.element[0].cloneNode(false)).data(), + function (key, value) { + if (that._isRegExpOption(key, value)) { + value = that._getRegExp(value); + } + options[key] = value; + } + ); + }, + + _create: function () { + this._initDataAttributes(); + this._initSpecialOptions(); + this._slots = []; + this._sequence = this._getXHRPromise(true); + this._sending = this._active = 0; + this._initProgressObject(this); + this._initEventHandlers(); + }, + + // This method is exposed to the widget API and allows to query + // the number of active uploads: + active: function () { + return this._active; + }, + + // This method is exposed to the widget API and allows to query + // the widget upload progress. + // It returns an object with loaded, total and bitrate properties + // for the running uploads: + progress: function () { + return this._progress; + }, + + // This method is exposed to the widget API and allows adding files + // using the fileupload API. 
The data parameter accepts an object which + // must have a files property and can contain additional options: + // .fileupload('add', {files: filesList}); + add: function (data) { + var that = this; + if (!data || this.options.disabled) { + return; + } + if (data.fileInput && !data.files) { + this._getFileInputFiles(data.fileInput).always(function (files) { + data.files = files; + that._onAdd(null, data); + }); + } else { + data.files = $.makeArray(data.files); + this._onAdd(null, data); + } + }, + + // This method is exposed to the widget API and allows sending files + // using the fileupload API. The data parameter accepts an object which + // must have a files or fileInput property and can contain additional options: + // .fileupload('send', {files: filesList}); + // The method returns a Promise object for the file upload call. + send: function (data) { + if (data && !this.options.disabled) { + if (data.fileInput && !data.files) { + var that = this, + dfd = $.Deferred(), + promise = dfd.promise(), + jqXHR, + aborted; + promise.abort = function () { + aborted = true; + if (jqXHR) { + return jqXHR.abort(); + } + dfd.reject(null, 'abort', 'abort'); + return promise; + }; + this._getFileInputFiles(data.fileInput).always( + function (files) { + if (aborted) { + return; + } + data.files = files; + jqXHR = that._onSend(null, data).then( + function (result, textStatus, jqXHR) { + dfd.resolve(result, textStatus, jqXHR); + }, + function (jqXHR, textStatus, errorThrown) { + dfd.reject(jqXHR, textStatus, errorThrown); + } + ); + } + ); + return this._enhancePromise(promise); + } + data.files = $.makeArray(data.files); + if (data.files.length) { + return this._onSend(null, data); + } + } + return this._getXHRPromise(false, data && data.context); + } + + }); + +})); diff --git a/files_widget/static/files_widget/js/jquery.iframe-transport.js b/files_widget/static/files_widget/js/jquery.iframe-transport.js new file mode 100644 index 0000000..073c5fb --- /dev/null +++ 
b/files_widget/static/files_widget/js/jquery.iframe-transport.js @@ -0,0 +1,205 @@ +/* + * jQuery Iframe Transport Plugin 1.7 + * https://github.com/blueimp/jQuery-File-Upload + * + * Copyright 2011, Sebastian Tschan + * https://blueimp.net + * + * Licensed under the MIT license: + * http://www.opensource.org/licenses/MIT + */ + +/*jslint unparam: true, nomen: true */ +/*global define, window, document */ + +(function (factory) { + 'use strict'; + if (typeof define === 'function' && define.amd) { + // Register as an anonymous AMD module: + define(['jquery'], factory); + } else { + // Browser globals: + factory(window.jQuery); + } +}(function ($) { + 'use strict'; + + // Helper variable to create unique names for the transport iframes: + var counter = 0; + + // The iframe transport accepts three additional options: + // options.fileInput: a jQuery collection of file input fields + // options.paramName: the parameter name for the file form data, + // overrides the name property of the file input field(s), + // can be a string or an array of strings. + // options.formData: an array of objects with name and value properties, + // equivalent to the return data of .serializeArray(), e.g.: + // [{name: 'a', value: 1}, {name: 'b', value: 2}] + $.ajaxTransport('iframe', function (options) { + if (options.async) { + var form, + iframe, + addParamChar; + return { + send: function (_, completeCallback) { + form = $('
'); + form.attr('accept-charset', options.formAcceptCharset); + addParamChar = /\?/.test(options.url) ? '&' : '?'; + // XDomainRequest only supports GET and POST: + if (options.type === 'DELETE') { + options.url = options.url + addParamChar + '_method=DELETE'; + options.type = 'POST'; + } else if (options.type === 'PUT') { + options.url = options.url + addParamChar + '_method=PUT'; + options.type = 'POST'; + } else if (options.type === 'PATCH') { + options.url = options.url + addParamChar + '_method=PATCH'; + options.type = 'POST'; + } + // javascript:false as initial iframe src + // prevents warning popups on HTTPS in IE6. + // IE versions below IE8 cannot set the name property of + // elements that have already been added to the DOM, + // so we set the name along with the iframe HTML markup: + counter += 1; + iframe = $( + '' + ).bind('load', function () { + var fileInputClones, + paramNames = $.isArray(options.paramName) ? + options.paramName : [options.paramName]; + iframe + .unbind('load') + .bind('load', function () { + var response; + // Wrap in a try/catch block to catch exceptions thrown + // when trying to access cross-domain iframe contents: + try { + response = iframe.contents(); + // Google Chrome and Firefox do not throw an + // exception when calling iframe.contents() on + // cross-domain requests, so we unify the response: + if (!response.length || !response[0].firstChild) { + throw new Error(); + } + } catch (e) { + response = undefined; + } + // The complete callback returns the + // iframe content document as response object: + completeCallback( + 200, + 'success', + {'iframe': response} + ); + // Fix for IE endless progress bar activity bug + // (happens on form submits to iframe targets): + $('') + .appendTo(form); + window.setTimeout(function () { + // Removing the form in a setTimeout call + // allows Chrome's developer tools to display + // the response result + form.remove(); + }, 0); + }); + form + .prop('target', iframe.prop('name')) + 
.prop('action', options.url) + .prop('method', options.type); + if (options.formData) { + $.each(options.formData, function (index, field) { + $('') + .prop('name', field.name) + .val(field.value) + .appendTo(form); + }); + } + if (options.fileInput && options.fileInput.length && + options.type === 'POST') { + fileInputClones = options.fileInput.clone(); + // Insert a clone for each file input field: + options.fileInput.after(function (index) { + return fileInputClones[index]; + }); + if (options.paramName) { + options.fileInput.each(function (index) { + $(this).prop( + 'name', + paramNames[index] || options.paramName + ); + }); + } + // Appending the file input fields to the hidden form + // removes them from their original location: + form + .append(options.fileInput) + .prop('enctype', 'multipart/form-data') + // enctype must be set as encoding for IE: + .prop('encoding', 'multipart/form-data'); + } + form.submit(); + // Insert the file input fields at their original location + // by replacing the clones with the originals: + if (fileInputClones && fileInputClones.length) { + options.fileInput.each(function (index, input) { + var clone = $(fileInputClones[index]); + $(input).prop('name', clone.prop('name')); + clone.replaceWith(input); + }); + } + }); + form.append(iframe).appendTo(document.body); + }, + abort: function () { + if (iframe) { + // javascript:false as iframe src aborts the request + // and prevents warning popups on HTTPS in IE6. + // concat is used to avoid the "Script URL" JSLint error: + iframe + .unbind('load') + .prop('src', 'javascript'.concat(':false;')); + } + if (form) { + form.remove(); + } + } + }; + } + }); + + // The iframe transport returns the iframe content document as response. + // The following adds converters from iframe to text, json, html, xml + // and script. 
+ // Please note that the Content-Type for JSON responses has to be text/plain + // or text/html, if the browser doesn't include application/json in the + // Accept header, else IE will show a download dialog. + // The Content-Type for XML responses on the other hand has to be always + // application/xml or text/xml, so IE properly parses the XML response. + // See also + // https://github.com/blueimp/jQuery-File-Upload/wiki/Setup#content-type-negotiation + $.ajaxSetup({ + converters: { + 'iframe text': function (iframe) { + return iframe && $(iframe[0].body).text(); + }, + 'iframe json': function (iframe) { + return iframe && $.parseJSON($(iframe[0].body).text()); + }, + 'iframe html': function (iframe) { + return iframe && $(iframe[0].body).html(); + }, + 'iframe xml': function (iframe) { + var xmlDoc = iframe && iframe[0]; + return xmlDoc && $.isXMLDoc(xmlDoc) ? xmlDoc : + $.parseXML((xmlDoc.XMLDocument && xmlDoc.XMLDocument.xml) || + $(xmlDoc.body).html()); + }, + 'iframe script': function (iframe) { + return iframe && $.globalEval($(iframe[0].body).text()); + } + } + }); + +})); diff --git a/files_widget/static/files_widget/js/widgets.js b/files_widget/static/files_widget/js/widgets.js new file mode 100644 index 0000000..c311a1e --- /dev/null +++ b/files_widget/static/files_widget/js/widgets.js @@ -0,0 +1,485 @@ +$(function(){ + + var csrfToken = getCookie('csrftoken'), + widget = $('.files-widget'), + effectTime = 200, + mediaURL = $('[data-media-url]').data('media-url'), + staticURL = $('[data-static-url]').data('static-url'), + thumbnailURL = $('[data-get-thumbnail-url]').data('get-thumbnail-url'), + undoText = $('[data-undo-text]').data('undo-text'), + template, + deletedTemplate; + + template = + '
'+ + ''+ + ''+ + ''+ + ''+ + ''+ + ' '+ + ''+ + ''+ + ''+ + ''+ + ''+ + '
'+ + '
'+ + '
'+ + '
'; + + deletedTemplate = + '
'+ + ''+ + ''+ + ''+ + ''+ + ''+ + ''+ + undoText+ + ''+ + ''+ + '
'; + + function splitlines(str) { + return str.match(/[^\r\n]+/g) || []; + } + + function filenameFromPath(path) { + return path.replace(/^.+\//, ''); + } + + function stripMediaURL(path) { + if (path.indexOf(mediaURL) === 0) { + return path.replace(mediaURL, ''); + } + return path; + } + + function getCookie(name) { + var cookieValue = null; + if (document.cookie && document.cookie != '') { + var cookies = document.cookie.split(';'); + for (var i = 0; i < cookies.length; i++) { + var cookie = jQuery.trim(cookies[i]); + // Does this cookie string begin with the name we want? + if (cookie.substring(0, name.length + 1) == (name + '=')) { + cookieValue = decodeURIComponent(cookie.substring(name.length + 1)); + break; + } + } + } + return cookieValue; + } + + function numberformat( number, decimals, dec_point, thousands_sep ) { + // http://kevin.vanzonneveld.net + // + original by: Jonas Raoni Soares Silva (http://www.jsfromhell.com) + // + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net) + // * example 1: number_format(1234.5678, 2, '.', ''); + // * returns 1: 1234.57 + + var n = number, c = isNaN(decimals = Math.abs(decimals)) ? 2 : decimals; + var d = dec_point == undefined ? "," : dec_point; + var t = thousands_sep == undefined ? "." : thousands_sep, s = n < 0 ? "-" : ""; + var i = parseInt(n = Math.abs(+n || 0).toFixed(c)) + "", j = (j = i.length) > 3 ? j % 3 : 0; + + return s + (j ? i.substr(0, j) + t : "") + i.substr(j).replace(/(\d{3})(?=\d)/g, "$1" + t) + (c ? 
d + Math.abs(n - i).toFixed(c).slice(2) : ""); + } + + function sizeformat(filesize, bits) { + var b = 'B'; + if (bits) { + b = 'b'; + } + + // from http://snipplr.com/view/5945/javascript-numberformat--ported-from-php/ + if (filesize >= 1073741824) { + filesize = numberformat(filesize / 1073741824, 2, '.', '') + 'G' + b; + } else { + if (filesize >= 1048576) { + filesize = numberformat(filesize / 1048576, 2, '.', '') + 'M' + b; + } else { + if (filesize >= 1024) { + filesize = numberformat(filesize / 1024, 0) + 'k' + b; + } else { + filesize = numberformat(filesize, 0) + b; + }; + }; + }; + return filesize; + }; + + function fillIn(input, files) { + var value = ''; + files.each(function() { + var path = $(this).data('image-path'); + if (path) { + value += path + '\n'; + } + }); + input.val(value); + } + + function fillInHiddenInputs(dropbox, movedOutFile, movedInFile) { + var input = $('input[name="' + dropbox.data('input-name') + '_0"]'), + deletedInput = $('input[name="' + dropbox.data('input-name') + '_1"]'), + movedInput = $('input[name="' + dropbox.data('input-name') + '_2"]'), + widget = input.closest('.files-widget'), + files = widget.find('.preview'), + deletedFiles = widget.find('.deleted-file'); + + fillIn(input, files.add(movedInFile)); + fillIn(deletedInput, deletedFiles); + + if (movedOutFile) { + var movedInputValue = splitlines(movedInput.val()), + filename = movedOutFile.data('image-path'); + + movedInputValue.push(filename); + movedInput.val(movedInputValue.join('\n')); + } + if (movedInFile) { + var movedInputValue = splitlines(movedInput.val()), + filename = movedInFile.data('image-path'), + index = movedInputValue.indexOf(filename); + + if (index != -1) { + movedInputValue.splice(index, 1); + movedInput.val(movedInputValue); + } + } + } + + function downloadThumbnail(preview) { + var imagePath = stripMediaURL(preview.data('image-path')), + dropbox = preview.closest('.files-widget-dropbox'), + previewSize = dropbox.data('preview-size'); + + 
$.get(thumbnailURL, + 'img=' + encodeURIComponent(imagePath) + '&preview_size=' + previewSize, + function(data) { + preview.find('.thumbnail') + .css({ 'width': '', 'height': '' }).attr('src', data); + preview.removeClass('new'); + }); + } + + function generateThumbnail(preview, file) { + var image = $('.thumbnail', preview), + reader = new FileReader(), + dropbox = preview.closest('.files-widget-dropbox'), + previewSize = dropbox.data('preview-size'), + defaultSize = parseInt(+previewSize * 2 / 3, 10); + + reader.onload = function(e) { + image.attr('src', e.target.result); + image.css({ 'width': '', 'height': '' }); + }; + + image.css({ + 'max-width': previewSize, 'max-height': previewSize, + 'width': defaultSize, 'height': defaultSize + }); + + if (file.size < 500000) { + reader.readAsDataURL(file); + } else { + $('').addClass('filename').text(file.name) + .appendTo(preview.find('.image-holder')); + } + } + + function addPreview(dropbox, imagePath, thumbnailPath, file, fromHiddenInput) { + var preview = $(template); + + if (dropbox.data('multiple') != 1) { + dropbox.find('.preview').each(function() { + deletePreview($(this), true); + }); + } + + dropbox.find('.message').hide(); + preview.hide().insertAfter(dropbox.children(':last-child')).fadeIn(effectTime); + if (imagePath) { + completePreview(preview, imagePath, thumbnailPath, fromHiddenInput); + } else if (file) { + generateThumbnail(preview, file); + } + + return preview; + } + + function completePreview(preview, imagePath, thumbnailPath, fromHiddenInput) { + var dropbox = preview.closest('.files-widget-dropbox'); + + preview.removeClass('new').attr('data-image-path', imagePath); + preview.find('.progress-holder, .filename').remove(); + + if (thumbnailPath) { + preview.find('.thumbnail') + .css({ 'width': '', 'height': '' }).attr('src', thumbnailPath); + } else { + downloadThumbnail(preview); + } + if (!fromHiddenInput) { + fillInHiddenInputs(dropbox); + } + } + + function onPreviewMove(preview, oldDropbox, 
newDropbox) { + if (oldDropbox.is(newDropbox)) { + fillInHiddenInputs(oldDropbox); + } else { + if (newDropbox.data('multiple') != 1) { + newDropbox.find('.preview').not(preview).each(function() { + deletePreview($(this), true); + }); + } + fillInHiddenInputs(oldDropbox, preview, null); + fillInHiddenInputs(newDropbox, null, preview); + if (!oldDropbox.find('.preview').length) { + oldDropbox.find('.message').show(); + } + if (oldDropbox.data('preview-size') !== newDropbox.data('preview-size')) { + downloadThumbnail(preview); + } + } + } + + function deletePreview(preview, changingToNewPreview) { + var dropbox = preview.closest('.files-widget-dropbox'), + widget = dropbox.closest('.files-widget'), + deletedPreview = $(deletedTemplate), + deletedContainer = $('.files-widget-deleted', widget), + deletedList = $('.deleted-list', deletedContainer), + path = preview.data('image-path'); + + function doDelete() { + $('.icon', deletedPreview).attr('src', preview.find('.thumbnail').attr('src')); + $('.name', deletedPreview).text(filenameFromPath(path)); + deletedPreview.attr('data-image-path', path); + deletedContainer.show(); + deletedPreview.hide().appendTo(deletedList) + deletedPreview.slideDown(effectTime); + preview.remove(); + + if (!dropbox.find('.preview').length && !changingToNewPreview) { + dropbox.find('.message').show(); + }; + fillInHiddenInputs(dropbox); + } + + if (changingToNewPreview) { + doDelete(); + } else { + preview.fadeOut(effectTime, doDelete); + } + } + + function undoDeletePreview(deletedPreview) { + var imagePath = deletedPreview.data('image-path'), + thumbnailPath = $('.icon', deletedPreview).attr('src'), + widget = deletedPreview.closest('.files-widget'), + dropbox = widget.find('.files-widget-dropbox'), + deletedContainer = $('.files-widget-deleted', widget), + deletedList = $('.deleted-list', deletedContainer), + preview = addPreview(dropbox, imagePath, thumbnailPath); + + deletedPreview.slideUp(effectTime, function() { + $(this).remove(); + if 
(!deletedList.find('.deleted-file').length) { + deletedContainer.hide(); + }; + fillInHiddenInputs(dropbox); + }); + } + + $(document).bind('drag dragover', function (e) { + e.preventDefault(); + $('.files-widget-dropbox').addClass('dragging-files'); + }).bind('drop', function (e) { + e.preventDefault(); + $('.files-widget-dropbox').removeClass('dragging-files'); + }).bind('dragleave', function (e) { + $('.files-widget-dropbox').removeClass('dragging-files'); + }); + + widget.each(function() { + var that = $(this), + dropbox = $('.files-widget-dropbox', that), + filesInput = $('.files-input', that), + message = $('.message', dropbox), + uploadURL = dropbox.data('upload-url'), + multiple = dropbox.data('multiple') == 1, + previewSize = dropbox.data('preview-size'), + initialFiles = $('.preview', dropbox), + fileBrowserResultInput = $('.filebrowser-result', that), + deletedContainer = $('.files-widget-deleted', that), + deletedList = $('.deleted-list', deletedContainer), + stats = $('.upload-progress-stats', that), + hiddenInput = $('input[name="' + dropbox.data('input-name') + '_0"]'), + initialFileNames = splitlines(hiddenInput.val()), + name; + + for (name in initialFileNames) { + if (!initialFiles.filter('[data-image-path="' + initialFileNames[name] + '"]').length) { + addPreview(dropbox, initialFileNames[name], null, null, true); + } + } + + initialFiles = $('.preview', dropbox); + if (initialFiles.length) { + message.hide(); + } + if (deletedList.find('.deleted-file').length) { + deletedContainer.show(); + } + + dropbox.on('click', '.remove-button', function() { + var preview = $(this).closest('.preview'); + deletePreview(preview); + }); + + that.on('click', '.undo-remove-button', function() { + var deletedPreview = $(this).closest('.deleted-file'); + undoDeletePreview(deletedPreview); + }); + + dropbox.on('click', '.enlarge-button', function() { + window.open(mediaURL + $(this).closest('.preview').data('image-path')); + }); + + function onFileBrowserResult() { 
+ var imagePath = stripMediaURL(fileBrowserResultInput.val()), + preview = addPreview(dropbox, imagePath); + fileBrowserResultInput.val(''); + } + + function checkFileBrowserResult() { + var oldVal = fileBrowserResultInput.val(), + checkInterval; + + checkInterval = setInterval(function() { + var newVal = fileBrowserResultInput.val(); + if (oldVal != newVal) { + clearInterval(checkInterval); + onFileBrowserResult(); + } + }, 250); + } + + $('.media-library-button', that).on('click', function() { + var url = window.__filebrowser_url || '/admin/media-library/browse/' + FileBrowser.show(fileBrowserResultInput.attr('id'), url + '?pop=1'); + checkFileBrowserResult(); + }); + + $('.add-by-url-button', that).on('click', function() { + $('.add-by-url-container', that).show(); + $(this).hide(); + $('.add-by-url', that).trigger('focus'); + }); + + $('.add-by-url', that).on('focusout', function() { + $('.add-by-url-button', that).show(); + $('.add-by-url-container', that).hide(); + }).on('keypress', function (e) { + var urlInput = $(this), + val = urlInput.val(); + + if (e.which == 13) { + e.stopPropagation(); + e.preventDefault(); + + $('.add-by-url-button', that).show(); + $('.add-by-url-container', that).hide(); + urlInput.val(''); + + if (val.length) { + addPreview(dropbox, val); + } + return false; + } + }); + + dropbox.disableSelection(); + dropbox.bind('dragover', function (e) { + dropbox.addClass('dragover'); + }).bind('dragleave drop', function (e) { + dropbox.removeClass('dragover'); + }); + + dropbox.sortable({ + placeholder: 'sortable-placeholder', + //tolerance: 'pointer', + connectWith: '.files-widget-dropbox', + //cursorAt: { top: 0, left: 0 }, + //items: '.preview:not(.controls-preview)', + revert: effectTime, + start: function(e, ui) { + $('.sortable-placeholder').width(ui.item.width()).height(ui.item.height()); + }, + over: function() { + message.hide(); + }, + beforeStop: function(e, ui) { + var newDropbox = ui.placeholder.closest('.files-widget-dropbox'); 
+ onPreviewMove(ui.item, dropbox, newDropbox); + } + }); + + filesInput.fileupload({ + url: uploadURL, + type: 'POST', + dataType: 'json', + dropZone: dropbox, + pasteZone: dropbox, + paramName: 'files[]', + limitConcurrentUploads: 3, + formData: [ + { name: 'csrfmiddlewaretoken', value: csrfToken }, + { name: 'preview_size', value: previewSize } + ], + autoUpload: true, + maxFileSize: 10000000, + acceptFileTypes: /(\.|\/)(gif|jpe?g|png)$/i, + maxNumberOfFiles: undefined, + previewMaxWidth: 150, + previewMaxHeight: 150, + previewCrop: true, + add: function(e, data) { + var preview = addPreview(dropbox, undefined, undefined, data.files[0]); + data.context = preview; + data.submit(); + }, + submit: function(e, data) { + // console.log('submit', data); + // create thumbnail client side? + }, + done: function(e, data) { + completePreview(data.context, + data.result.imagePath, data.result.thumbnailPath); + }, + fail: function(e, data) { + //console.log('failed', data); + // display errors + }, + always: function(e, data) { + //console.log('always', data); + stats.text(''); + }, + progress: function(e, data) { + //console.log('progress', data); + var progress = parseInt(data.loaded / data.total * 100, 10); + data.context.find('.progress').css('width', progress + '%'); + }, + progressall: function(e, data) { + //console.log('progressall', data); + stats.text(sizeformat(data.loaded) + + ' of ' + sizeformat(data.total) + + ' (' + sizeformat(data.bitrate, true) + 'ps)'); + }, + }); + }); +}); diff --git a/files_widget/templates/files_widget/files_widget.html b/files_widget/templates/files_widget/files_widget.html new file mode 100644 index 0000000..aa6e974 --- /dev/null +++ b/files_widget/templates/files_widget/files_widget.html @@ -0,0 +1,91 @@ +{% load i18n files_widget_tags %} +
+
+ + {% if multiple %} + {% trans 'Drop multiple images here to upload' %} + {% else %} + {% trans 'Drop an image here to upload' %} + {% endif %} + + {% for path_to_file in files.splitlines %}{% spaceless %} +
+
+ {% if path_to_file|is_image %} + + {% else %} + + {% endif %} + +
+
+ {{ path_to_file|filename_from_path }} +
+ + + + + + + + +
+ {% endspaceless %}{% endfor %} +
+
+ + + {% trans 'Upload' %}... + + + {% if use_filebrowser %} +  |  + {% trans 'Library' %} + {% endif %} + {% if add_image_by_url %} +  |  + {% trans 'Add by url...' %} + + {% endif %} + {{ input_string }} + {# here comes the hacked mezzanine-filebrowser-safe integration #} + + + + + + + + + + +
+ +
diff --git a/files_widget/templates/files_widget/images_widget.html b/files_widget/templates/files_widget/images_widget.html new file mode 100644 index 0000000..073b21f --- /dev/null +++ b/files_widget/templates/files_widget/images_widget.html @@ -0,0 +1,90 @@ +{% load i18n files_widget_tags %} +
+
+ + {% if multiple %} + {% trans 'Drop multiple images here to upload' %} + {% else %} + {% trans 'Drop an image here to upload' %} + {% endif %} + + {% for path_to_file in files.splitlines %}{% spaceless %} +
+
+ {% if path_to_file|is_image %} + + {% else %} + + {% endif %} + + + + + + + + + +
+
+ {{ path_to_file|filename_from_path }} +
+
+ {% endspaceless %}{% endfor %} +
+
+ + + {% trans 'Upload' %}... + + + {% if use_filebrowser %} +  |  + {% trans 'Library' %} + {% endif %} + {% if add_image_by_url %} +  |  + {% trans 'Add by url...' %} + + {% endif %} + {{ input_string }} + {# here comes the hacked mezzanine-filebrowser-safe integration #} + + + + + + + + + + +
+ +
diff --git a/files_widget/templates/files_widget/includes/file.html b/files_widget/templates/files_widget/includes/file.html new file mode 100644 index 0000000..a90f904 --- /dev/null +++ b/files_widget/templates/files_widget/includes/file.html @@ -0,0 +1,10 @@ +{% load i18n files_widget_tags %} + +
+
+ +
+
+ {{ path_to_file|filename_from_path }} +
+
"""Template tags/filters for the files_widget app.

Provides a best-effort wrapper around sorl's ``thumbnail`` tag plus small
path-handling filters used by the widget templates.
"""
import re
import urllib.parse

from django import template
from django.conf import settings


register = template.Library()


@register.tag
def sorl_thumbnail(parser, token):
    """Compile sorl's ``{% thumbnail %}`` tag, returning ``None`` on failure.

    Deliberate best-effort: a broken or missing sorl-thumbnail install must
    not take the whole admin page down, so compilation errors are swallowed.
    """
    from sorl.thumbnail.templatetags.thumbnail import thumbnail
    try:
        return thumbnail(parser, token)
    except Exception:
        # Swallow on purpose (was a leftover debug print here); the tag
        # simply renders nothing when sorl cannot compile it.
        return None


@register.filter
def is_image(path):
    """Return True if *path* (relative to ``MEDIA_ROOT``) is an image file."""
    from ..files import is_file_image
    # Use the configured settings instead of a hardcoded project import
    # (was ``from tEDataProj.settings import MEDIA_ROOT``, which breaks any
    # other project). Plain concatenation mirrors the original behaviour:
    # MEDIA_ROOT is assumed to end with a path separator — TODO confirm.
    return is_file_image(f'{settings.MEDIA_ROOT}{path}')


@register.filter
def thumbnail_format(path):
    """Map a file extension to the thumbnail format sorl should produce.

    GIF/PNG keep transparency by thumbnailing to PNG; everything else
    (including extension-less paths) becomes JPEG.
    """
    match = re.search(r'\.\w+$', path)
    if match and match.group(0).lower() in ('.gif', '.png'):
        return 'PNG'
    return 'JPEG'


@register.filter
def filename_from_path(path):
    """Strip any leading directories (POSIX or Windows style) from *path*."""
    path = re.sub(r'^.+/', '', path)
    return re.sub(r'^.+\\', '', path)


@register.filter
def unquote(value):
    """URL-decode *value* (turn percent-escapes back into characters)."""
    return urllib.parse.unquote(value)
# ---------------------------------------------------------------------------
# files_widget/urls.py
# ---------------------------------------------------------------------------
try:
    # Django < 1.4 kept ``url`` in ``django.conf.urls.defaults``.
    from django.conf.urls.defaults import url
except ImportError:  # was a bare except: catch only the import failure
    from django.conf.urls import url

from .views import upload, thumbnail_url

urlpatterns = [
    url(r'^upload/$', upload, name='files_widget_upload'),
    url(r'^thumbnail-url/$', thumbnail_url, name='files_widget_get_thumbnail_url'),
]


# ---------------------------------------------------------------------------
# files_widget/utils.py
# ---------------------------------------------------------------------------
from functools import partial


def curry(func, *a, **kw):
    """Bind leading positional/keyword arguments of *func*.

    Presumably a stand-in for ``django.utils.functional.curry`` removed in
    newer Django — TODO confirm against callers.
    """
    return partial(func, *a, **kw)


# ---------------------------------------------------------------------------
# files_widget/views.py
# ---------------------------------------------------------------------------
import json

from django.http import Http404, HttpResponse, HttpResponseBadRequest
from django.conf import settings
from django.template.loader import render_to_string

from .files import save_upload, is_file_image
from .controllers import ImagePath


# TODO(review): was commented out in the original too — decide whether
# uploads really should be open to any authenticated admin user.
# @permission_required('files_widget.can_upload_files')
def upload(request):
    """Receive a multipart file upload and answer with JSON.

    Expects one file in ``request.FILES`` plus an optional ``preview_size``
    POST field. Returns ``{"success", "imagePath", "thumbnailPath"}``;
    non-POST requests 404, an empty upload yields a 400.

    Note: the legacy raw-XHR upload branch was dead (commented-out) code and
    has been dropped; only the form/multipart flavour is handled.
    """
    if request.method != 'POST':
        raise Http404

    is_raw = False
    try:
        upload = next(iter(request.FILES.values()))
    except StopIteration:
        return HttpResponseBadRequest(
            json.dumps({
                'success': False,
                'message': 'Error while uploading file.',
            }),
            content_type='application/json',
        )
    filename = upload.name

    path_to_file = save_upload(upload, filename, is_raw, request.user)
    preview_size = request.POST.get('preview_size', '64')

    if not is_file_image(f'{settings.MEDIA_ROOT}{path_to_file}'):
        # Non-images get a generic file icon served from static files.
        thumbnail_path = f'{settings.STATIC_URL}files_widget/img/file-icons/file_icon.png'
    else:
        # Explicit context instead of ``locals()``: thumbnail.html reads
        # exactly ``path_to_file`` and ``preview_size``.
        thumbnail_path = render_to_string('files_widget/includes/thumbnail.html', {
            'path_to_file': path_to_file,
            'preview_size': preview_size,
        })

    return HttpResponse(
        json.dumps({
            'success': True,
            'imagePath': path_to_file,
            'thumbnailPath': thumbnail_path,
        }),
        content_type='application/json',
    )


# @permission_required('files_widget.can_upload_files')
def thumbnail_url(request):
    """Return the thumbnail URL for ``?img=…`` at size ``?preview_size=…``.

    Missing parameters now really 404 (the original's bare ``except:``
    swallowed its own ``Http404``); thumbnailing failures fall back to the
    generic file icon under ``STATIC_URL`` (the original returned a
    filesystem-ish path that was never a servable URL).
    """
    if 'img' not in request.GET or 'preview_size' not in request.GET:
        raise Http404
    try:
        url = ImagePath(request.GET['img']).thumbnail(request.GET['preview_size']).url
    except Exception:
        url = f'{settings.STATIC_URL}files_widget/img/file-icons/file_icon.png'
    return HttpResponse(url)


# ---------------------------------------------------------------------------
# manage.py  (#!/usr/bin/env python)
# ---------------------------------------------------------------------------
# "Django's command-line utility for administrative tasks."
import os
import sys


def main():
    """Run administrative tasks."""
    # NOTE(review): settings module is 'FirePlayProj.settings' here, while
    # files_widget_tags.py originally referenced 'tEDataProj.settings' —
    # confirm which project this tree actually belongs to.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'FirePlayProj.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()

# NOTE(review): the original dump ended this span with
# templates/admin/change_form.html, whose <script> include was destroyed by
# extraction; it is not reproducible here and must be restored from VCS.