From db3d800bbad89ab97022654ec6902e7890b470e7 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Fri, 1 Jul 2016 23:44:17 +0300 Subject: [PATCH 01/42] remove products, auth by password --- README.md | 80 +---------- pyadmitad/api.py | 14 -- pyadmitad/items/__init__.py | 1 - pyadmitad/items/me.py | 2 +- pyadmitad/items/products.py | 156 --------------------- pyadmitad/tests/test_products.py | 226 ------------------------------- setup.py | 6 +- 7 files changed, 11 insertions(+), 474 deletions(-) delete mode 100644 pyadmitad/items/products.py delete mode 100644 pyadmitad/tests/test_products.py diff --git a/README.md b/README.md index e96a829..8f682ec 100644 --- a/README.md +++ b/README.md @@ -1,68 +1,34 @@ admitad-python-api ================== -A Python wrapper around the Admitad API +A Python wrapper around the [Admitad API](https://developers.admitad.com/en/) Install ------- -Dependencies - -* requests -* simplejson - -Install by cloning from the GitHub repo: - - $ git clone git://github.com/trezorg/admitad-python-api.git - $ cd admitad-python-api - $ python setup.py test - $ python setup.py build - $ python setup.py install - - or just - - $ cp -r admitad-python-api/pyadmitad path/to/destination - + pip install admitad-api Example ------- - +```python from pyadmitad import api client_id = "" client_secret = "" - username = "" - password = "" scope = "private_data" - - client = api.get_oauth_password_client( - client_id, - client_secret, - username, - password, - scope - ) - - or already having an access token - - client = api.get_oauth_client(access_token) - - info = client.Me.get() + access_toke = "" scope = "public_data" client = api.get_oauth_password_client( client_id, client_secret, - username, - password, scope ) - print client.WebsiteTypes.get() - print client.WebsiteTypes.get(limit=2, offset=1) - - + print(client.WebsiteTypes.get()) + print(client.WebsiteTypes.get(limit=2, offset=1)) +``` API Items ------------- @@ -292,38 +258,6 @@ API Items res = client.CampaignsManage.disconnect(c_id=6, w_id=22) -### Products ### - -###### Categories of products ###### - - res = client.ProductCategories.get() - res = client.ProductCategories.get(limit=1, order_by=-name) - res = client.ProductCategories.getOne(2) - - -###### Vendors of products ###### - - res = client.ProductVendors.get() - res = client.ProductVendors.get(limit=1, order_by=-name) - res = client.ProductVendors.getOne(2) - - -###### Campaigns with products ###### - - res = client.ProductCampaigns.get(22) - res = client.ProductCampaigns.get(22, limit=1, order_by=-name) - res = client.ProductCampaigns.getOne(22, 6) - - -###### Products for website ###### - - res = client.Products.get(22) - res = client.Products.get(22, limit=1) - res = client.Products.get(22, limit=1, order_by=-price) - res = client.Products.get(22, price_from=1000) - res = client.ProductCampaigns.getOne(22, 2) - - ### Announcements ### diff --git a/pyadmitad/api.py b/pyadmitad/api.py index 9d69943..c58282a 100644 --- a/pyadmitad/api.py +++ b/pyadmitad/api.py @@ -10,20 +10,6 @@ def get_authorizing_client(access_token, user_agent=None, debug=False): return client.Client(http_transport) -def get_oauth_password_client( - client_id, client_secret, - username, password, scopes, user_agent=None, debug=False): - auth = transport.oauth_password_authorization({ - 'client_id': client_id, - 'client_secret': client_secret, - 'username': username, - 'password': password, - 'scopes': scopes - }) - return get_authorizing_client( - auth['access_token'], user_agent=user_agent, 
debug=debug) - - def get_oauth_client_client( client_id, client_secret, scopes, user_agent=None, debug=False): auth = transport.oauth_client_authorization({ diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index b08849e..c9a27db 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -6,7 +6,6 @@ from pyadmitad.items.referrals import * from pyadmitad.items.banners import * from pyadmitad.items.campaigns import * -from pyadmitad.items.products import * from pyadmitad.items.announcements import * from pyadmitad.items.payments import * from pyadmitad.items.money_transfers import * diff --git a/pyadmitad/items/me.py b/pyadmitad/items/me.py index 955fabd..8175b6d 100644 --- a/pyadmitad/items/me.py +++ b/pyadmitad/items/me.py @@ -45,4 +45,4 @@ def get(self, **kwargs): res = client.Balance.get() """ kwargs['url'] = self.URL - return self.transport.set_method('GET').set_method("GET").request(**kwargs) + return self.transport.set_method("GET").request(**kwargs) diff --git a/pyadmitad/items/products.py b/pyadmitad/items/products.py deleted file mode 100644 index ca0a738..0000000 --- a/pyadmitad/items/products.py +++ /dev/null @@ -1,156 +0,0 @@ -from pyadmitad.items.base import Item - -__all__ = ( - 'ProductCategories', - 'ProductVendors', - 'ProductCampaigns', - 'Products', -) - - -class ProductCategories(Item): - """ - List of products categories - - Required scope - "public_data" - """ - URL = Item.prepare_url('products/categories') - SINGLE_URL = Item.prepare_url('products/categories/%(id)s') - - ORDERING = ('name',) - - def get(self, **kwargs): - """ - res = client.ProductCategories.get() - res = client.ProductCategories.get(limit=1, order_by=-name) - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - Here _id is category id. - - res = client.ProductCategories.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class ProductVendors(Item): - """ - List of products vendors - - Required scope - "public_data" - """ - URL = Item.prepare_url('products/vendors') - SINGLE_URL = Item.prepare_url('products/vendors/%(id)s') - - ORDERING = ('name',) - - def get(self, **kwargs): - """ - res = client.ProductVendors.get() - res = client.ProductVendors.get(limit=1, order_by=-name) - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs). \ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - Here _id is category id. - - res = client.ProductVendors.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class ProductCampaigns(Item): - """ - List of campaigns that have products - - Required scope - "products_for_website" - """ - URL = Item.prepare_url('products/advcampaigns/website/%(id)s') - SINGLE_URL = Item.prepare_url( - 'products/advcampaigns/%(c_id)s/website/%(id)s') - - ORDERING = ('name',) - - def get(self, _id, **kwargs): - """ - Here _id is website id. 
- - res = client.ProductCampaigns.get(22) - res = client.ProductCampaigns.get(22, limit=1, order_by=-name) - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs). \ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, c_id, **kwargs): - """ - Here _id is website id and c_id is campaign id - - res = client.ProductCampaigns.getOne(22, 6) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.set_method('GET').request(**kwargs) - - -class Products(Item): - """ - List of products - - Required scope - "products_for_website" - """ - URL = Item.prepare_url('products/website/%(id)s') - SINGLE_URL = Item.prepare_url('products/%(p_id)s/website/%(id)s') - - ORDERING = ('price', 'category', 'vendor', 'campaign', 'date_updated') - FILTERING = { - 'keyword': Item.to_unicode, - 'price_from': int, - 'price_to': int, - 'campaign': int, - 'category': int, - 'vendor': int - } - - def get(self, _id, **kwargs): - """ - Here _id is website id. - - res = client.Products.get(22) - res = client.Products.get(22, limit=1) - res = client.Products.get(22, limit=1, order_by=-price) - res = client.Products.get(22, price_from=1000) - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['allowed_ordering'] = self.ORDERING - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, p_id, **kwargs): - """ - Here _id is website id and p_id is product id - - res = client.ProductCampaigns.getOne(22, 2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['p_id'] = self.sanitize_id(p_id) - return self.transport.set_method('GET').request(**kwargs) diff --git a/pyadmitad/tests/test_products.py b/pyadmitad/tests/test_products.py deleted file mode 100644 index f5b61ef..0000000 --- a/pyadmitad/tests/test_products.py +++ /dev/null @@ -1,226 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import ProductVendors, ProductCategories,\ - ProductCampaigns, Products -from pyadmitad.tests.base import BaseTestCase - - -PRODUCTS_RESULT = { - u'_meta': { - u'count': 156, - u'limit': 1, - u'offset': 1 - }, - u'results': [ - { - u'advcampaign': { - u'id': 6, - u'name': u'AdvCamp 1' - }, - u'available': True, - u'category': { - u'id': 3, - u'name': u'category-child1' - }, - u'currency': u'RUB', - u'description': None, - u'id': 2, - u'model': u'JAISALMER', - u'name': u'Свеча ароматическая Comme des Garcons', - u'param': { - u'Пол': u'Уни', - u'Размер': u'145 гр.' 
- }, - u'picture': u'http://cdn.admitad.com/some_file.jpg', - u'picture_orig': u'http://content.some/path/file.jpg', - u'price': 3900.0, - u'thumbnail': u'http://cdn.admitad.com/some_file.jpg', - u'typePrefix': u'Свеча ароматическая', - u'updated': u'2012-08-30 21:35:26', - u'url': u'http://ad.admitad.com/goto/' - u'195b832b828cb0fd8d17234642e5a7/?ulp=' - u'[[[http://www.boutique.ru/jewelleryandgifts/' - u'svechy_in_gifts/commedesgarcons/' - u'e9aeb173-a43a-11dd-892e-00304833051e]]]', - u'vendor': { - u'id': 1, - u'name': u'Comme des Garcons' - } - } - ] -} - - -class ProductVendorsTestCase(BaseTestCase): - - def test_get_product_vendors_request(self): - self.set_mocker(ProductVendors.URL, limit=1) - result = { - u'_meta': { - u'count': 752, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'id': 1, - u'name': u'Comme des Garcons' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductVendors.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_product_vendors_with_id_request(self): - self.set_mocker(ProductVendors.SINGLE_URL, id=1, with_pagination=False) - result = { - u'id': 1, - u'name': u'Comme des Garcons' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductVendors.getOne(1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() - - -class ProductCategoriesTestCase(BaseTestCase): - - def test_get_product_categories_request(self): - self.set_mocker(ProductCategories.URL, limit=4) - result = { - u'_meta': { - u'count': 4, - u'limit': 4, - u'offset': 0 - }, - u'results': [ - { - u'id': 1, - u'name': u'category1' - }, - { - u'id': 2, - u'name': u'category2' - }, - { - u'id': 3, - u'name': u'category-child1', - u'parent': { - u'id': 1, - u'name': u'category1', - u'parent': None - } - }, - { - u'id': 4, - u'name': u'category4' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCategories.get(limit=4) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 4) - self.assertEqual(len(res[u'results']), 4) - self.mocker.verify() - - def test_get_product_categories_with_id_request(self): - self.set_mocker( - ProductCategories.SINGLE_URL, id=1, with_pagination=False) - result = { - u'id': 1, - u'name': u'category1' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCategories.getOne(1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() - - -class ProductCampaignsTestCase(BaseTestCase): - - def test_get_product_campaigns_request(self): - self.set_mocker(ProductCampaigns.URL, id=25, limit=1) - result = { - u'results': [ - { - u'count': 189, - u'id': 6, - u'name': u'AdvCamp 1' - } - ], - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCampaigns.get(25, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(len(res[u'results']), 1) - self.mocker.verify() - - def test_get_product_campaigns_with_id_request(self): - self.set_mocker( - 
ProductCampaigns.SINGLE_URL, id=25, c_id=6, with_pagination=False) - result = { - u'count': 189, - u'id': 6, - u'name': u'AdvCamp 1' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCampaigns.getOne(25, 6) - self.assertEqual(res[u'id'], 6) - self.mocker.verify() - - -class ProductsTestCase(BaseTestCase): - - def test_get_products_request(self): - self.set_mocker(Products.URL, id=25, limit=1, offset=1) - result = PRODUCTS_RESULT - self.mocker.result(result) - self.mocker.replay() - res = self.client.Products.get(25, limit=1, offset=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(len(res[u'results']), 1) - self.mocker.verify() - - def test_get_products_with_id_request(self): - self.set_mocker( - Products.SINGLE_URL, id=25, p_id=2, with_pagination=False) - result = PRODUCTS_RESULT['results'][0] - self.mocker.result(result) - self.mocker.replay() - res = self.client.Products.getOne(25, 2) - self.assertEqual(res[u'id'], 2) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/setup.py b/setup.py index 7243a7e..c0dfa30 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import setup, find_packages __author__ = 'trezorg@gmail.com' -__version__ = '0.0.1' +__version__ = '1.0.0' setup( name="pyadmitad", @@ -13,10 +13,10 @@ author_email='trezorg@gmail.com', description='A Python wrapper around the Admitad API', license='MIT', - url='https://github.com/trezorg/admitad-python-api.git', + url='https://github.com/admitad/admitad-python-api.git', keywords='admitad', packages=find_packages(exclude='tests'), - install_requires=['requests'], + install_requires=['requests', 'simplejson'], test_suite='nose.collector', tests_require=['nose', 'mocker'], classifiers=[ From 5aae30dc286a67324f170cd01d094f40409ef802 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Fri, 1 Jul 2016 23:59:55 +0300 Subject: [PATCH 02/42] remove money_transfer --- pyadmitad/items/__init__.py | 1 - pyadmitad/items/money_transfers.py | 88 ------------------- pyadmitad/tests/test_money_transfer.py | 112 ------------------------- 3 files changed, 201 deletions(-) delete mode 100644 pyadmitad/items/money_transfers.py delete mode 100644 pyadmitad/tests/test_money_transfer.py diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index c9a27db..fd875e2 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -8,4 +8,3 @@ from pyadmitad.items.campaigns import * from pyadmitad.items.announcements import * from pyadmitad.items.payments import * -from pyadmitad.items.money_transfers import * diff --git a/pyadmitad/items/money_transfers.py b/pyadmitad/items/money_transfers.py deleted file mode 100644 index 99a5c60..0000000 --- a/pyadmitad/items/money_transfers.py +++ /dev/null @@ -1,88 +0,0 @@ -from copy import deepcopy -from pyadmitad.items.base import Item - - -__all__ = ( - 'MoneyTransfers', - 'MoneyTransfersManage', -) - - -class MoneyTransfersBase(Item): - - ORDERING = ('date_created',) - FILTERING = { - 'sender': Item.to_unicode, - 'recipient': Item.to_unicode, - 'currency': Item.to_unicode, - } - - -class MoneyTransfers(Item): - """ - List of webmaster money transfers - - Required scope - "webmaster_money_transfers" - """ - URL = Item.prepare_url('webmaster_money_transfers') - SINGLE_URL = Item.prepare_url('webmaster_money_transfer/%(id)s') - - 
def get(self, **kwargs): - """ - res = client.MoneyTransfers.get() - res = client.MoneyTransfers.get(limit=2) - - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').\ - set_pagination(**kwargs).set_filtering(**kwargs).\ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.MoneyTransfers.getOne(_id=2) - res = client.MoneyTransfers.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class MoneyTransfersManage(Item): - """ - Manage webmaster money transfers - - Required scope - "manage_webmaster_money_transfers" - """ - CREATE_FIELDS = { - 'comment': lambda x: Item.sanitize_string_value(x, 'comment'), - 'recipient': lambda x: Item.sanitize_string_value(x, 'recipient'), - 'currency': lambda x: Item.sanitize_currency(x, 'currency'), - 'sum': lambda x: Item.sanitize_float_value(x, 'sum') - } - - CREATE_URL = Item.prepare_url('webmaster_money_transfer/create') - - @staticmethod - def sanitize_fields(fields, **kwargs): - data = deepcopy(kwargs) - for field in fields: - data[field] = fields[field](data.get(field)) - return dict([(key, value) for (key, value) in data.items() if value]) - - def create(self, **kwargs): - """ - Create a webmaster money transfers - - res = client.MoneyTransfersManage.create( - sender='webmaster', - recipient='recipient', - sum=200, - currency='USD', - comment='comment') - - """ - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - return self.transport.set_method('POST').\ - set_data(data).request(**kwargs) diff --git a/pyadmitad/tests/test_money_transfer.py b/pyadmitad/tests/test_money_transfer.py deleted file mode 100644 index 127ec09..0000000 --- a/pyadmitad/tests/test_money_transfer.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import MoneyTransfers, MoneyTransfersManage -from pyadmitad.tests.base import BaseTestCase - - -MONEY_TRANSFER_CREATE_DATA = dict( - currency='USD', - comment="test", - recipient="admitadppvweb", - sum='200.12', -) - - -class MoneyTransfersTestCase(BaseTestCase): - - def test_get_money_transfers_request(self): - self.set_mocker(MoneyTransfers.URL, limit=1) - result = { - "_meta": { - "count": 6, - "limit": 1, - "offset": 0 - }, - "results": [ - { - "comment": "test", - "sender": { - "username": "webmaster1", - "id": 96 - }, - "sum": 200.0, - "currency": "USD", - "date_created": "2013-12-06T12:28:29", - "recipient": { - "username": "admitadppvweb", - "id": 100 - }, - "id": 8 - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.MoneyTransfers.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'results'][0][u'currency'], u'USD') - self.mocker.verify() - - def test_get_money_transfers_request_with_id(self): - self.set_mocker(MoneyTransfers.SINGLE_URL, id=8, with_pagination=False) - result = { - "comment": "test", - "sender": { - "username": "webmaster1", - "id": 96 - }, - "sum": 200.0, - "currency": "USD", - "date_created": "2013-12-06T12:28:29", - "recipient": { - "username": "admitadppvweb", - "id": 100 - }, - "id": 8 - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.MoneyTransfers.getOne(8) - self.assertEqual(res[u'id'], 8) - 
self.mocker.verify() - - -class MoneyTransfersManageTestCase(BaseTestCase): - - def test_create_payments_request(self): - self.set_mocker(MoneyTransfersManage.CREATE_URL, - method='POST', - with_pagination=False, - data=MONEY_TRANSFER_CREATE_DATA) - result = { - "comment": "test", - "sender": { - "username": "webmaster1", - "id": 96 - }, - "sum": 200.12, - "currency": "USD", - "date_created": "2013-12-06T12:28:29", - "recipient": { - "username": "admitadppvweb", - "id": 100 - }, - "id": 9 - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.MoneyTransfersManage.create( - **MONEY_TRANSFER_CREATE_DATA) - self.assertEqual(res[u'comment'], u'test') - self.assertEqual(res[u'currency'], u'USD') - self.assertEqual(res[u'sum'], 200.12) - self.assertEqual(res[u'sender'][u'username'], u'webmaster1') - self.mocker.verify() - -if __name__ == '__main__': - unittest.main() From 24369dab55ea5ec7088501f123ebf2bc4fa5c367 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sat, 2 Jul 2016 00:08:14 +0300 Subject: [PATCH 03/42] added CouponsCategories --- pyadmitad/items/announcements.py | 21 --------------------- pyadmitad/items/coupons.py | 24 +++++++++++++++++++----- 2 files changed, 19 insertions(+), 26 deletions(-) diff --git a/pyadmitad/items/announcements.py b/pyadmitad/items/announcements.py index 875b659..847eeaf 100644 --- a/pyadmitad/items/announcements.py +++ b/pyadmitad/items/announcements.py @@ -1,9 +1,7 @@ from pyadmitad.items.base import Item - __all__ = ( 'Announcements', - 'AnnouncementsManage' ) @@ -33,22 +31,3 @@ def getOne(self, _id, **kwargs): kwargs['url'] = self.SINGLE_URL kwargs['id'] = self.sanitize_id(_id) return self.transport.set_method('GET').request(**kwargs) - - -class AnnouncementsManage(Item): - """ - manage of announcements - - Required scope - "manage_announcements" - """ - DELETE_URL = Item.prepare_url('announcements/delete/%(id)s/') - - def delete(self, _id, **kwargs): - """ - Here _id is an announcement id - - res = client.AnnouncementsManage.delete(12) - """ - kwargs['url'] = self.DELETE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('POST').request(**kwargs) diff --git a/pyadmitad/items/coupons.py b/pyadmitad/items/coupons.py index 9cc0cff..ad14dcd 100644 --- a/pyadmitad/items/coupons.py +++ b/pyadmitad/items/coupons.py @@ -4,6 +4,7 @@ __all__ = ( 'Coupons', 'CouponsForWebsite', + 'CouponsCategories', ) @@ -46,7 +47,7 @@ def get(self, **kwargs): kwargs['url'] = self.URL kwargs['allowed_ordering'] = self.ORDERING kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ + return self.transport.get().set_pagination(**kwargs).\ set_ordering(**kwargs).set_filtering(**kwargs).request(**kwargs) def getOne(self, _id, **kwargs): @@ -56,7 +57,7 @@ def getOne(self, _id, **kwargs): """ kwargs['url'] = self.SINGLE_URL kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + return self.transport.get().request(**kwargs) class CouponsForWebsite(CouponsBase): @@ -84,13 +85,12 @@ def get(self, _id, **kwargs): use campaign=[1, 2] res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) - """ kwargs['url'] = self.URL kwargs['id'] = self.sanitize_id(_id) kwargs['allowed_ordering'] = self.ORDERING kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ + return self.transport.get().set_pagination(**kwargs).\ 
set_ordering(**kwargs).set_filtering(**kwargs).request(**kwargs) def getOne(self, _id, c_id, **kwargs): @@ -103,4 +103,18 @@ def getOne(self, _id, c_id, **kwargs): kwargs['url'] = self.SINGLE_URL kwargs['id'] = self.sanitize_id(_id) kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.set_method('GET').request(**kwargs) + return self.transport.get().request(**kwargs) + + +class CouponsCategories(CouponsBase): + + URL = Item.prepare_url('coupons/categories') + SINGLE_URL = Item.prepare_url('coupons/categories/%(id)s') + + def get(self, **kwargs): + kwargs['url'] = self.URL + return self.transport.get().set_pagination(**kwargs).request(**kwargs) + + def getOne(self, _id, **kwargs): + kwargs['url'] = self.SINGLE_URL + return self.transport.get().set_pagination(**kwargs).request(**kwargs) From 7253774c32e85f1eb5fb22a975172b78252878cd Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sat, 2 Jul 2016 10:56:32 +0300 Subject: [PATCH 04/42] Added Landings resource --- pyadmitad/api.py | 2 -- pyadmitad/items/__init__.py | 2 ++ pyadmitad/items/landings.py | 27 +++++++++++++++++++++++++++ 3 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 pyadmitad/items/landings.py diff --git a/pyadmitad/api.py b/pyadmitad/api.py index c58282a..b2b7c0b 100644 --- a/pyadmitad/api.py +++ b/pyadmitad/api.py @@ -23,5 +23,3 @@ def get_oauth_client_client( def get_oauth_client(access_token, user_agent=None): return get_authorizing_client(access_token, user_agent=user_agent) - - diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index fd875e2..92bac7e 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -8,3 +8,5 @@ from pyadmitad.items.campaigns import * from pyadmitad.items.announcements import * from pyadmitad.items.payments import * +from pyadmitad.items.landings import * + diff --git a/pyadmitad/items/landings.py b/pyadmitad/items/landings.py new file mode 100644 index 0000000..712ac23 --- /dev/null +++ b/pyadmitad/items/landings.py @@ -0,0 +1,27 @@ +from pyadmitad.items.base import Item + +__all__ = [ + 'Landings', + 'LandingsForWebsite', +] + + +class Landings(Item): + + URL = Item.prepare_url('landings/%(campaign_id)s') + + def get(self, campaign_id, **kwargs): + kwargs['url'] = self.URL + kwargs['campaign_id'] = self.sanitize_id(campaign_id) + return self.transport.get().set_pagination(**kwargs).request(**kwargs) + + +class LandingsForWebsite(Item): + + URL = Item.prepare_url('landings/%(campaign_id)s/website/%(website_id)s') + + def get(self, campaign_id, website_id, **kwargs): + kwargs['url'] = self.URL + kwargs['campaign_id'] = self.sanitize_id(campaign_id) + kwargs['website_id'] = self.sanitize_id(website_id) + return self.transport.get().set_pagination(**kwargs).request(**kwargs) From 1ce921fc25de93d8dff533a0b8658f4b7c2ce268 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sat, 2 Jul 2016 11:09:57 +0300 Subject: [PATCH 05/42] remove pagination from CouponsCategories.getOne --- pyadmitad/items/coupons.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyadmitad/items/coupons.py b/pyadmitad/items/coupons.py index ad14dcd..6c337ac 100644 --- a/pyadmitad/items/coupons.py +++ b/pyadmitad/items/coupons.py @@ -109,12 +109,13 @@ def getOne(self, _id, c_id, **kwargs): class CouponsCategories(CouponsBase): URL = Item.prepare_url('coupons/categories') - SINGLE_URL = Item.prepare_url('coupons/categories/%(id)s') + SINGLE_URL = Item.prepare_url('coupons/categories/%(coupon_category_id)s') def get(self, **kwargs): 
kwargs['url'] = self.URL return self.transport.get().set_pagination(**kwargs).request(**kwargs) - def getOne(self, _id, **kwargs): + def getOne(self, coupon_category_id, **kwargs): kwargs['url'] = self.SINGLE_URL - return self.transport.get().set_pagination(**kwargs).request(**kwargs) + kwargs['coupon_category_id'] = self.sanitize_id(coupon_category_id) + return self.transport.get().request(**kwargs) From 47c4dbece7bc245298708361869ae5068d3f3088 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 16:11:56 +0300 Subject: [PATCH 06/42] Remove money transfer from README --- README.md | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/README.md b/README.md index 8f682ec..d082ab6 100644 --- a/README.md +++ b/README.md @@ -288,26 +288,6 @@ API Items res = client.PaymentsManage.delete(71) -### Money transfers ### - - -###### List of money transfers ###### - - res = client.MoneyTransfers.get() - res = client.MoneyTransfers.get(limit=2) - res = client.MoneyTransfers.getOne(2) - res = client.MoneyTransfers.get(sender='sender') - res = client.MoneyTransfers.get(currency='USD') - -###### Manage money transfers ###### - - res = client.MoneyTransfersManage.create( - currency='USD', - recipient='recipient', - comment='comment', - sum=10) - - Notes ------ From 20c6315dfce75adea2261d884b41698d5d1af587 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 16:18:27 +0300 Subject: [PATCH 07/42] new author email and name ;) --- setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index c0dfa30..183bad6 100644 --- a/setup.py +++ b/setup.py @@ -3,14 +3,14 @@ use_setuptools() from setuptools import setup, find_packages -__author__ = 'trezorg@gmail.com' +__author__ = 'dev@admitad.com' __version__ = '1.0.0' setup( name="pyadmitad", version=__version__, - author='Igor Nemilentsev', - author_email='trezorg@gmail.com', + author='Admitad Dev Bot', + author_email='dev@admitad.com', description='A Python wrapper around the Admitad API', license='MIT', url='https://github.com/admitad/admitad-python-api.git', From 8646df303fbd9f1271108d86b06f2cbaccde3a8c Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 18:52:12 +0300 Subject: [PATCH 08/42] move sanitize_fields to Item --- pyadmitad/items/base.py | 8 ++++++++ pyadmitad/items/websites.py | 7 ------- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/pyadmitad/items/base.py b/pyadmitad/items/base.py index a8e89d4..fb7cd04 100644 --- a/pyadmitad/items/base.py +++ b/pyadmitad/items/base.py @@ -1,3 +1,4 @@ +from copy import deepcopy from datetime import datetime, date from pyadmitad.constants import DATE_FORMAT, BASE_URL,\ CURRENCIES, LONG_DATE_FORMAT @@ -12,6 +13,13 @@ def __init__(self, transport): def sanitize_id(self, _id, name='_id'): return self.sanitize_integer_value(_id, name) + @staticmethod + def sanitize_fields(fields, **kwargs): + data = deepcopy(kwargs) + for field in fields: + data[field] = fields[field](data.get(field)) + return dict([(key, value) for (key, value) in data.items() if value]) + @staticmethod def sanitize_non_blank_value(value, name): if not value: diff --git a/pyadmitad/items/websites.py b/pyadmitad/items/websites.py index f2ee1a7..e23e1d8 100644 --- a/pyadmitad/items/websites.py +++ b/pyadmitad/items/websites.py @@ -98,13 +98,6 @@ class WebsitesManage(Item): x, 'atnd_hits', blank=True) } - @staticmethod - def sanitize_fields(fields, **kwargs): - data = deepcopy(kwargs) - for field in fields: - data[field] = 
fields[field](data.get(field)) - return dict([(key, value) for (key, value) in data.items() if value]) - def create(self, **kwargs): """ res = client.WebsitesManage.create(name='test', ....) From 8c0bdb8bfbbcf1c87b0989068fe41470a6dcce94 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 19:18:41 +0300 Subject: [PATCH 09/42] added OptCodes --- README.md | 26 +++++++-------- pyadmitad/items/__init__.py | 1 + pyadmitad/items/optcodes.py | 65 +++++++++++++++++++++++++++++++++++++ 3 files changed, 79 insertions(+), 13 deletions(-) create mode 100644 pyadmitad/items/optcodes.py diff --git a/README.md b/README.md index d082ab6..103aacf 100644 --- a/README.md +++ b/README.md @@ -11,23 +11,23 @@ Install Example ------- ```python - from pyadmitad import api +from pyadmitad import api - client_id = "" - client_secret = "" - scope = "private_data" - access_toke = "" +client_id = "" +client_secret = "" +scope = "private_data" +access_toke = "" - scope = "public_data" +scope = "public_data" - client = api.get_oauth_password_client( - client_id, - client_secret, - scope - ) +client = api.get_oauth_password_client( + client_id, + client_secret, + scope +) - print(client.WebsiteTypes.get()) - print(client.WebsiteTypes.get(limit=2, offset=1)) +print(client.WebsiteTypes.get()) +print(client.WebsiteTypes.get(limit=2, offset=1)) ``` API Items diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index 92bac7e..6c02e2d 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -9,4 +9,5 @@ from pyadmitad.items.announcements import * from pyadmitad.items.payments import * from pyadmitad.items.landings import * +from pyadmitad.items.optcodes import * diff --git a/pyadmitad/items/optcodes.py b/pyadmitad/items/optcodes.py new file mode 100644 index 0000000..e87fcf1 --- /dev/null +++ b/pyadmitad/items/optcodes.py @@ -0,0 +1,65 @@ +from pyadmitad.items.base import Item + + +__all__ = [ + 'OptCodes', + 'CampaignStatusOptCodesManager', + 'ActionOptCodesManager', +] + + +class BaseOptCodes(Item): + + DESC_MODE_SIMPLE = 0 + DESC_MODE_EXTENDED = 1 + + METHOD_GET = 0 + METHOD_POST = 1 + + ACTION_TYPE_ALL = 0 + ACTION_TYPE_SALE = 1 + ACTION_TYPE_LEAD = 2 + + ACTION_STATUS_NEW = 0 + ACTION_STATUS_APPROVED = 1 + ACTION_STATUS_DECLINED = 2 + ACTION_STATUS_PENDING = 3 + + +class OptCodes(BaseOptCodes): + + URL = Item.prepare_url('opt_codes') + SINGLE_URL = Item.prepare_url('opt_codes/%(optcode_id)s') + + def get(self, **kwargs): + kwargs['url'] = self.URL + return self.transport.get().set_pagination().request(**kwargs) + + def getOne(self, optcode_id, **kwargs): + kwargs['url'] = self.SINGLE_URL + kwargs['optcode_id'] = self.sanitize_id(optcode_id) + return self.transport.get().request(**kwargs) + + +class BaseOptCodesManager(BaseOptCodes): + + DELETE_URL = Item.prepare_url('opt_codes/delete/%(optcode_id)s') + + def delete(self, optcode_id): + data = { + 'url': self.DELETE_URL, + 'optcode_id': self.sanitize_id(optcode_id), + } + return self.transport.set_method('POST').request(**data) + + +class CampaignStatusOptCodesManager(BaseOptCodesManager): + + CREATE_URL = Item.prepare_url('opt_codes/offer/create') + UPDATE_URL = Item.prepare_url('opt_codes/offer/update/%(optcode_id)s') + + +class ActionOptCodesManager(BaseOptCodesManager): + + CREATE_URL = Item.prepare_url('opt_codes/action/create') + UPDATE_URL = Item.prepare_url('opt_codes/action/update/%(optcode_id)s') From 23a302820982c8e260526dd7e8c14080de622f03 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 
2016 19:25:23 +0300 Subject: [PATCH 10/42] added News --- pyadmitad/items/__init__.py | 2 +- pyadmitad/items/news.py | 21 +++++++++++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) create mode 100644 pyadmitad/items/news.py diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index 6c02e2d..f3541e7 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -10,4 +10,4 @@ from pyadmitad.items.payments import * from pyadmitad.items.landings import * from pyadmitad.items.optcodes import * - +from pyadmitad.items.news import * diff --git a/pyadmitad/items/news.py b/pyadmitad/items/news.py new file mode 100644 index 0000000..8eaf46e --- /dev/null +++ b/pyadmitad/items/news.py @@ -0,0 +1,21 @@ +from pyadmitad.items.base import Item + + +__all__ = [ + 'News', +] + + +class News(Item): + + URL = Item.prepare_url('news') + SINGLE_URL = Item.prepare_url('news/%(news_id)s') + + def get(self, **kwargs): + kwargs['url'] = self.URL + return self.transport.get().set_pagination(**kwargs).request(**kwargs) + + def getOne(self, news_id, **kwargs): + kwargs['url'] = self.SINGLE_URL + kwargs['news_id'] = self.sanitize_id(news_id) + return self.transport.get().set_pagination(**kwargs).request(**kwargs) From a09592097bbc1a3edfa7dc40fcf0e8f4a99b6bd6 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 19:52:22 +0300 Subject: [PATCH 11/42] Added DepplinksManage --- pyadmitad/items/__init__.py | 1 + pyadmitad/items/deeplinks.py | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 pyadmitad/items/deeplinks.py diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index f3541e7..175962b 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -11,3 +11,4 @@ from pyadmitad.items.landings import * from pyadmitad.items.optcodes import * from pyadmitad.items.news import * +from pyadmitad.items.deeplinks import * diff --git a/pyadmitad/items/deeplinks.py b/pyadmitad/items/deeplinks.py new file mode 100644 index 0000000..e9a360b --- /dev/null +++ b/pyadmitad/items/deeplinks.py @@ -0,0 +1,24 @@ +from pyadmitad.items.base import Item + + +__all__ = [ + 'DeeplinksManage', +] + + +class DeeplinksManage(Item): + + CREATE_URL = Item.prepare_url('deeplink/%(website_id)s/advcampaign/%(campaign_id)s') + + CREATE_FIELDS = { + 'ulp': lambda x: Item.sanitize_string_value(x, 'ulp'), + 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30, blank=True), + # todo: subid[1-4] + } + + def create(self, website_id, campaign_id, **kwargs): + data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) + kwargs['url'] = self.CREATE_URL + kwargs['website_id'] = self.sanitize_id(website_id) + kwargs['campaign_id'] = self.sanitize_id(campaign_id) + return self.transport.set_method('GET').set_data(data).request(**kwargs) From 6915a490600f7dfc5c5691c270039d569f51a54b Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 20:18:17 +0300 Subject: [PATCH 12/42] Added LinksValidator --- README.md | 20 +------------------- pyadmitad/items/__init__.py | 1 + pyadmitad/items/deeplinks.py | 2 +- pyadmitad/items/links.py | 15 +++++++++++++++ 4 files changed, 18 insertions(+), 20 deletions(-) create mode 100644 pyadmitad/items/links.py diff --git a/README.md b/README.md index 103aacf..d908a28 100644 --- a/README.md +++ b/README.md @@ -16,9 +16,6 @@ from pyadmitad import api client_id = "" client_secret = "" scope = "private_data" -access_toke = "" - -scope = "public_data" client = 
api.get_oauth_password_client( client_id, @@ -26,8 +23,7 @@ client = api.get_oauth_password_client( scope ) -print(client.WebsiteTypes.get()) -print(client.WebsiteTypes.get(limit=2, offset=1)) +print(client.Me.get()) ``` API Items @@ -258,20 +254,6 @@ API Items res = client.CampaignsManage.disconnect(c_id=6, w_id=22) -### Announcements ### - - -###### List of announcements ###### - - res = client.Announcements.get() - res = client.Announcements.get(limit=1, offset=2) - res = client.Announcements.getOne(2) - -###### Manage announcements ###### - - res = client.AnnouncementsManage.delete(12) - - ### Payments ### diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index 175962b..9a05d9a 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -12,3 +12,4 @@ from pyadmitad.items.optcodes import * from pyadmitad.items.news import * from pyadmitad.items.deeplinks import * +from pyadmitad.items.links import * diff --git a/pyadmitad/items/deeplinks.py b/pyadmitad/items/deeplinks.py index e9a360b..9196c03 100644 --- a/pyadmitad/items/deeplinks.py +++ b/pyadmitad/items/deeplinks.py @@ -21,4 +21,4 @@ def create(self, website_id, campaign_id, **kwargs): kwargs['url'] = self.CREATE_URL kwargs['website_id'] = self.sanitize_id(website_id) kwargs['campaign_id'] = self.sanitize_id(campaign_id) - return self.transport.set_method('GET').set_data(data).request(**kwargs) + return self.transport.get().set_data(data).request(**kwargs) diff --git a/pyadmitad/items/links.py b/pyadmitad/items/links.py new file mode 100644 index 0000000..7da1978 --- /dev/null +++ b/pyadmitad/items/links.py @@ -0,0 +1,15 @@ +from pyadmitad.items.base import Item + + +class LinksValidator(Item): + + URL = Item.prepare_url('validate_links') + + GET_FIELDS = { + 'link': lambda x: Item.sanitize_string_value(x, 'link'), + } + + def get(self, link, **kwargs): + data = self.sanitize_fields(self.GET_FIELDS, link=link) + kwargs['url'] = self.URL + return self.transport.get().set_data(data).request(**kwargs) From 1d0ccd2f9c4d93350bdf198e8754f1bb00eebd07 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Mon, 4 Jul 2016 20:31:59 +0300 Subject: [PATCH 13/42] setup.py, tests, LICENSE.txt --- .travis.yml | 5 + LICENSE.txt | 21 ++ README.md | 7 +- ez_setup.py | 382 -------------------------- pyadmitad/items/optcodes.py | 38 +++ pyadmitad/tests/test_announcements.py | 25 +- pyadmitad/transport.py | 6 + setup.cfg | 2 + setup.py | 13 +- 9 files changed, 84 insertions(+), 415 deletions(-) create mode 100644 .travis.yml create mode 100644 LICENSE.txt delete mode 100644 ez_setup.py create mode 100644 setup.cfg diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..e7bf449 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,5 @@ +language: python +python: + - 2.7.11 + - 3.5 +script: python setup.py test diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..36f376e --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) [2016] [Raman Barkholenka] + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all 
+copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index d908a28..a444393 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -admitad-python-api +admitad-python-api [![Build Status](https://travis-ci.org/janitor/admitad-python-api.svg?branch=master)](https://travis-ci.org/janitor/admitad-python-api) ================== A Python wrapper around the [Admitad API](https://developers.admitad.com/en/) @@ -26,6 +26,11 @@ client = api.get_oauth_password_client( print(client.Me.get()) ``` +Tests +----- + + python setup.py test + API Items ------------- diff --git a/ez_setup.py b/ez_setup.py deleted file mode 100644 index 72d35a5..0000000 --- a/ez_setup.py +++ /dev/null @@ -1,382 +0,0 @@ -#!python -"""Bootstrap setuptools installation - -If you want to use setuptools in your package's setup.py, just include this -file in the same directory with it, and add this to the top of your setup.py:: - - from ez_setup import use_setuptools - use_setuptools() - -If you want to require a specific version of setuptools, set a download -mirror, or use an alternate download directory, you can do so by supplying -the appropriate options to ``use_setuptools()``. - -This file can also be run as a script to install or upgrade setuptools. -""" -import os -import shutil -import sys -import tempfile -import tarfile -import optparse -import subprocess -import platform - -from distutils import log - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -DEFAULT_VERSION = "1.4" -DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" - -def _python_cmd(*args): - args = (sys.executable,) + args - return subprocess.call(args) == 0 - -def _check_call_py24(cmd, *args, **kwargs): - res = subprocess.call(cmd, *args, **kwargs) - class CalledProcessError(Exception): - pass - if not res == 0: - msg = "Command '%s' return non-zero exit status %d" % (cmd, res) - raise CalledProcessError(msg) -vars(subprocess).setdefault('check_call', _check_call_py24) - -def _install(tarball, install_args=()): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # installing - log.warn('Installing Setuptools') - if not _python_cmd('setup.py', 'install', *install_args): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - # exitcode will be 2 - return 2 - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - - -def _build_egg(egg, tarball, to_dir): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now 
working in %s', subdir) - - # building an egg - log.warn('Building a Setuptools egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -def _do_download(version, download_base, to_dir, download_delay): - egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' - % (version, sys.version_info[0], sys.version_info[1])) - if not os.path.exists(egg): - tarball = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, tarball, to_dir) - sys.path.insert(0, egg) - - # Remove previously-imported pkg_resources if present (see - # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). - if 'pkg_resources' in sys.modules: - del sys.modules['pkg_resources'] - - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, download_delay=15): - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - was_imported = 'pkg_resources' in sys.modules or \ - 'setuptools' in sys.modules - try: - import pkg_resources - except ImportError: - return _do_download(version, download_base, to_dir, download_delay) - try: - pkg_resources.require("setuptools>=" + version) - return - except pkg_resources.VersionConflict: - e = sys.exc_info()[1] - if was_imported: - sys.stderr.write( - "The required version of setuptools (>=%s) is not available,\n" - "and can't be installed while this script is running. Please\n" - "install a more recent version first, using\n" - "'easy_install -U setuptools'." - "\n\n(Currently using %r)\n" % (version, e.args[0])) - sys.exit(2) - else: - del pkg_resources, sys.modules['pkg_resources'] # reload ok - return _do_download(version, download_base, to_dir, - download_delay) - except pkg_resources.DistributionNotFound: - return _do_download(version, download_base, to_dir, - download_delay) - -def _clean_check(cmd, target): - """ - Run the command to download target. If the command fails, clean up before - re-raising the error. - """ - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - if os.access(target, os.F_OK): - os.unlink(target) - raise - -def download_file_powershell(url, target): - """ - Download the file at url to target using Powershell (which will validate - trust). Raise an exception if the command cannot complete. 
- """ - target = os.path.abspath(target) - cmd = [ - 'powershell', - '-Command', - "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), - ] - _clean_check(cmd, target) - -def has_powershell(): - if platform.system() != 'Windows': - return False - cmd = ['powershell', '-Command', 'echo test'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_powershell.viable = has_powershell - -def download_file_curl(url, target): - cmd = ['curl', url, '--silent', '--output', target] - _clean_check(cmd, target) - -def has_curl(): - cmd = ['curl', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_curl.viable = has_curl - -def download_file_wget(url, target): - cmd = ['wget', url, '--quiet', '--output-document', target] - _clean_check(cmd, target) - -def has_wget(): - cmd = ['wget', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_wget.viable = has_wget - -def download_file_insecure(url, target): - """ - Use Python to download the file, even though it cannot authenticate the - connection. - """ - try: - from urllib.request import urlopen - except ImportError: - from urllib2 import urlopen - src = dst = None - try: - src = urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. - data = src.read() - dst = open(target, "wb") - dst.write(data) - finally: - if src: - src.close() - if dst: - dst.close() - -download_file_insecure.viable = lambda: True - -def get_best_downloader(): - downloaders = [ - download_file_powershell, - download_file_curl, - download_file_wget, - download_file_insecure, - ] - - for dl in downloaders: - if dl.viable(): - return dl - -def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, delay=15, - downloader_factory=get_best_downloader): - """Download setuptools from a specified location and return its filename - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download - attempt. - - ``downloader_factory`` should be a function taking no arguments and - returning a function for downloading a URL to a target. - """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - tgz_name = "setuptools-%s.tar.gz" % version - url = download_base + tgz_name - saveto = os.path.join(to_dir, tgz_name) - if not os.path.exists(saveto): # Avoid repeated downloads - log.warn("Downloading %s", url) - downloader = downloader_factory() - downloader(url, saveto) - return os.path.realpath(saveto) - - -def _extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. `members' is optional and must be a subset of the - list returned by getmembers(). 
- """ - import copy - import operator - from tarfile import ExtractError - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. - directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 448 # decimal for oct 0700 - self.extract(tarinfo, path) - - # Reverse sort directories. - if sys.version_info < (2, 4): - def sorter(dir1, dir2): - return cmp(dir1.name, dir2.name) - directories.sort(sorter) - directories.reverse() - else: - directories.sort(key=operator.attrgetter('name'), reverse=True) - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError: - e = sys.exc_info()[1] - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - -def _build_install_args(options): - """ - Build the arguments to 'python setup.py install' on the setuptools package - """ - install_args = [] - if options.user_install: - if sys.version_info < (2, 6): - log.warn("--user requires Python 2.6 or later") - raise SystemExit(1) - install_args.append('--user') - return install_args - -def _parse_args(): - """ - Parse the command line for options - """ - parser = optparse.OptionParser() - parser.add_option( - '--user', dest='user_install', action='store_true', default=False, - help='install in user site package (requires Python 2.6 or later)') - parser.add_option( - '--download-base', dest='download_base', metavar="URL", - default=DEFAULT_URL, - help='alternative URL from where to download the setuptools package') - parser.add_option( - '--insecure', dest='downloader_factory', action='store_const', - const=lambda: download_file_insecure, default=get_best_downloader, - help='Use internal, non-validating downloader' - ) - options, args = parser.parse_args() - # positional arguments are ignored - return options - -def main(version=DEFAULT_VERSION): - """Install or upgrade setuptools and EasyInstall""" - options = _parse_args() - tarball = download_setuptools(download_base=options.download_base, - downloader_factory=options.downloader_factory) - return _install(tarball, _build_install_args(options)) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/pyadmitad/items/optcodes.py b/pyadmitad/items/optcodes.py index e87fcf1..be59d44 100644 --- a/pyadmitad/items/optcodes.py +++ b/pyadmitad/items/optcodes.py @@ -44,6 +44,11 @@ def getOne(self, optcode_id, **kwargs): class BaseOptCodesManager(BaseOptCodes): DELETE_URL = Item.prepare_url('opt_codes/delete/%(optcode_id)s') + CREATE_URL = '' + UPDATE_URL = '' + + CREATE_FIELDS = {} + UPDATE_FIELDS = {} def delete(self, optcode_id): data = { @@ -52,14 +57,47 @@ def delete(self, optcode_id): } return self.transport.set_method('POST').request(**data) + def create(self, **kwargs): + data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) + kwargs['url'] = self.CREATE_URL + return self.transport.set_method('POST').set_data(data).request(**kwargs) + + def update(self, optcode_id, **kwargs): + data = self.sanitize_fields(self.UPDATE_FIELDS, **kwargs) + kwargs['url'] = self.UPDATE_URL + kwargs['optcode_id'] = self.sanitize_id(optcode_id) + return self.transport.set_method('POST').set_data(data).request(**kwargs) + class CampaignStatusOptCodesManager(BaseOptCodesManager): CREATE_URL = Item.prepare_url('opt_codes/offer/create') UPDATE_URL = 
Item.prepare_url('opt_codes/offer/update/%(optcode_id)s') + CREATE_FIELDS = { + } + UPDATE_FIELDS = { + } + class ActionOptCodesManager(BaseOptCodesManager): CREATE_URL = Item.prepare_url('opt_codes/action/create') UPDATE_URL = Item.prepare_url('opt_codes/action/update/%(optcode_id)s') + + CREATE_FIELDS = { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode'), + 'url': lambda x: Item.sanitize_string_value(x, 'url'), + 'method': lambda x: Item.sanitize_integer_value(x, 'method'), + 'action_type': lambda x: Item.sanitize_integer_value(x, 'action_type'), + 'status': lambda x: Item.sanitize_integer_value(x, 'status'), + } + UPDATE_FIELDS = { + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode', blank=True), + 'url': lambda x: Item.sanitize_string_value(x, 'url', blank=True), + 'method': lambda x: Item.sanitize_integer_value(x, 'method', blank=True), + 'action_type': lambda x: Item.sanitize_integer_value(x, 'action_type', blank=True), + 'status': lambda x: Item.sanitize_integer_value(x, 'status', blank=True), + } diff --git a/pyadmitad/tests/test_announcements.py b/pyadmitad/tests/test_announcements.py index e20f58a..f8e485e 100644 --- a/pyadmitad/tests/test_announcements.py +++ b/pyadmitad/tests/test_announcements.py @@ -1,7 +1,8 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 import unittest -from pyadmitad.items import Announcements, AnnouncementsManage + +from pyadmitad.items import Announcements from pyadmitad.tests.base import BaseTestCase @@ -24,11 +25,6 @@ } } -ANNOUNCEMENTS_DELETE_RESULTS = { - u'message': u'Оповещение удалено успешно.', - u'success': u'Deleted' -} - class AnnouncementsTestCase(BaseTestCase): @@ -55,20 +51,5 @@ def test_get_announcements_request_with_id(self): self.mocker.verify() -class AnnouncementsManageTestCase(BaseTestCase): - - def test_delete_announcements_request(self): - self.set_mocker( - AnnouncementsManage.DELETE_URL, id=264, - with_pagination=False, method='POST') - result = ANNOUNCEMENTS_DELETE_RESULTS - self.mocker.result(result) - self.mocker.replay() - res = self.client.AnnouncementsManage.delete(264) - self.assertIn(u'message', res) - self.assertIn(u'success', res) - self.mocker.verify() - - if __name__ == '__main__': unittest.main() diff --git a/pyadmitad/transport.py b/pyadmitad/transport.py index 42694b1..8ee45bf 100644 --- a/pyadmitad/transport.py +++ b/pyadmitad/transport.py @@ -420,6 +420,12 @@ def set_method(self, method): # here we should clean data return self.clean_data() + def get(self): + return self.set_method('GET') + + def post(self): + return self.set_method('POST') + def set_debug(self, debug): self._debug = debug return self diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..224a779 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[metadata] +description-file = README.md \ No newline at end of file diff --git a/setup.py b/setup.py index 183bad6..bdc9868 100644 --- a/setup.py +++ b/setup.py @@ -1,21 +1,15 @@ -"""The setup and build script for the pyadmitad library.""" -from ez_setup import use_setuptools -use_setuptools() -from setuptools import setup, find_packages - -__author__ = 'dev@admitad.com' -__version__ = '1.0.0' +from setuptools import setup setup( name="pyadmitad", - version=__version__, + packages=['pyadmitad'], + version='1.0.0', author='Admitad Dev Bot', author_email='dev@admitad.com', description='A Python wrapper 
around the Admitad API', license='MIT', url='https://github.com/admitad/admitad-python-api.git', keywords='admitad', - packages=find_packages(exclude='tests'), install_requires=['requests', 'simplejson'], test_suite='nose.collector', tests_require=['nose', 'mocker'], @@ -31,4 +25,3 @@ "git+https://github.com/trezorg/mocker.git#egg=mocker", ], ) - From e0327c29c3c7e3eea40023c7c4e621494dc27d17 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 12:23:40 +0300 Subject: [PATCH 14/42] fix bug in `sanitize_integer_value` --- pyadmitad/items/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyadmitad/items/base.py b/pyadmitad/items/base.py index fb7cd04..abd2a05 100644 --- a/pyadmitad/items/base.py +++ b/pyadmitad/items/base.py @@ -47,7 +47,7 @@ def sanitize_string_value( @staticmethod def sanitize_integer_value(value, name, blank=False): - if not value: + if value is None: if not blank: raise ValueError("Blank integer value '%s': %s" % (name, value)) return value From 74e35d0c2ac9826376de0ecba29a2557ce94dbf4 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 12:23:57 +0300 Subject: [PATCH 15/42] fix statuses for optcodes --- pyadmitad/items/optcodes.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pyadmitad/items/optcodes.py b/pyadmitad/items/optcodes.py index be59d44..2d635f3 100644 --- a/pyadmitad/items/optcodes.py +++ b/pyadmitad/items/optcodes.py @@ -20,10 +20,10 @@ class BaseOptCodes(Item): ACTION_TYPE_SALE = 1 ACTION_TYPE_LEAD = 2 - ACTION_STATUS_NEW = 0 - ACTION_STATUS_APPROVED = 1 - ACTION_STATUS_DECLINED = 2 - ACTION_STATUS_PENDING = 3 + ACTION_STATUS_NEW = 5 + ACTION_STATUS_APPROVED = 6 + ACTION_STATUS_DECLINED = 7 + ACTION_STATUS_PENDING = 8 class OptCodes(BaseOptCodes): From 7e02855c84393e7676cbed25f46d5dd0e978b576 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 12:44:12 +0300 Subject: [PATCH 16/42] added OfferStatusOptCodesManager, ActionOptCodesManager --- pyadmitad/items/optcodes.py | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/pyadmitad/items/optcodes.py b/pyadmitad/items/optcodes.py index 2d635f3..46b6b14 100644 --- a/pyadmitad/items/optcodes.py +++ b/pyadmitad/items/optcodes.py @@ -3,7 +3,7 @@ __all__ = [ 'OptCodes', - 'CampaignStatusOptCodesManager', + 'OfferStatusOptCodesManager', 'ActionOptCodesManager', ] @@ -25,6 +25,10 @@ class BaseOptCodes(Item): ACTION_STATUS_DECLINED = 7 ACTION_STATUS_PENDING = 8 + EVENT_ACTION = 0 + EVENT_OFFER_STATUS = 1 + EVENT_REFERRAL = 2 + class OptCodes(BaseOptCodes): @@ -69,14 +73,22 @@ def update(self, optcode_id, **kwargs): return self.transport.set_method('POST').set_data(data).request(**kwargs) -class CampaignStatusOptCodesManager(BaseOptCodesManager): +class OfferStatusOptCodesManager(BaseOptCodesManager): CREATE_URL = Item.prepare_url('opt_codes/offer/create') UPDATE_URL = Item.prepare_url('opt_codes/offer/update/%(optcode_id)s') CREATE_FIELDS = { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode'), + 'url': lambda x: Item.sanitize_string_value(x, 'url'), + 'method': lambda x: Item.sanitize_integer_value(x, 'method'), } UPDATE_FIELDS = { + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode', blank=True), + 'url': lambda x: Item.sanitize_string_value(x, 'url', blank=True), + 'method': lambda x: 
Item.sanitize_integer_value(x, 'method', blank=True), } From 801cea932900680e26f46d33f8d61bae07c7c26b Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 20:11:11 +0300 Subject: [PATCH 17/42] send files via transport --- pyadmitad/transport.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/pyadmitad/transport.py b/pyadmitad/transport.py index 8ee45bf..eef9f06 100644 --- a/pyadmitad/transport.py +++ b/pyadmitad/transport.py @@ -52,28 +52,28 @@ def prepare_request_data( def api_request( url, data=None, headers=None, method='GET', - timeout=None, ssl_verify=False, debug=False): + files=None, timeout=None, ssl_verify=False, debug=False): kwargs = prepare_request_data( data=data, headers=headers, method=method, timeout=timeout, ssl_verify=ssl_verify) status_code = 500 content = u'' try: - response = requests.request(method, url, **kwargs) + response = requests.request(method, url, files=files, **kwargs) debug_log(u'Request url: %s' % response.url, debug) - if method == 'POST': - debug_log(u'Request body: %s' % response.request.body, debug) + # if method == 'POST': + # debug_log(u'Request body: %s' % response.request.body, debug) status_code = response.status_code content = response.content if status_code >= 400: response.raise_for_status() - return response.json() except requests.HTTPError as err: raise HttpException(status_code, to_json(content), err) except requests.RequestException as err: raise ConnectionException(err) except (ValueError, TypeError) as err: raise JsonException(err) + return response.json() def get_credentials(client_id, client_secret): @@ -364,12 +364,13 @@ def to_value(self): class HttpTransport(object): - SUPPORTED_METHODS = ('GET', 'POST') + SUPPORTED_METHODS = ('GET', 'POST', 'DELETE') SUPPORTED_LANGUAGES = ('ru', 'en', 'de', 'pl') def __init__(self, access_token, method=None, user_agent=None, debug=False): self._headers = build_headers(access_token, user_agent=user_agent) self._method = method or 'GET' + self._files = None self._data = None self._url = None self._language = None @@ -392,6 +393,10 @@ def set_data(self, data): self._data = data return self + def set_files(self, files): + self._files = files + return self + def clean_data(self): self._data = None return self @@ -453,7 +458,8 @@ def request(self, **kwargs): 'method': self._method, 'headers': self._headers, 'data': self._data, - 'debug': self._debug + 'debug': self._debug, + 'files': self._files, } response = self.api_request(self._url, **requests_kwargs) return kwargs.get('handler', self._handle_response)(response) From 2685256e187e7b5d547225939b91d7c9be814fc2 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 20:12:43 +0300 Subject: [PATCH 18/42] added LostOrders, LostOrdersManager --- pyadmitad/items/__init__.py | 1 + pyadmitad/items/lost_orders.py | 58 ++++++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 pyadmitad/items/lost_orders.py diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index 9a05d9a..b515901 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -13,3 +13,4 @@ from pyadmitad.items.news import * from pyadmitad.items.deeplinks import * from pyadmitad.items.links import * +from pyadmitad.items.lost_orders import * diff --git a/pyadmitad/items/lost_orders.py b/pyadmitad/items/lost_orders.py new file mode 100644 index 0000000..34710ac --- /dev/null +++ b/pyadmitad/items/lost_orders.py @@ -0,0 +1,58 @@ +# coding: utf-8 + +from 
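# Editor's annotation, not part of the patch: the lost_orders module introduced
# below adds LostOrders/LostOrdersManager and is a consumer of the transport
# file-upload support (`set_files`) added in the previous commit. A minimal
# usage sketch follows; the ids, order fields and file path are placeholders,
# not values taken from the patch.
#
#   with open('/path/to/receipt.png', 'rb') as receipt:
#       res = client.LostOrdersManager.create(
#           receipt,                      # sent to the API as the 'attachment' file
#           advcampaign=100, website=10,
#           order_id='ORDER-1', order_date='12.08.2016',
#           order_price=345.77, comment='placeholder comment')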
pyadmitad.items.base import Item + + +__all__ = [ + 'LostOrders', + 'LostOrdersManager', +] + + +class LostOrders(Item): + + SCOPE = 'lost_orders' + + URL = Item.prepare_url('lost_orders') + SINGLE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s') + + def get(self, **kwargs): + kwargs['url'] = self.URL + return self.transport.get().set_pagination().request(**kwargs) + + def getOne(self, lost_order_id, **kwargs): + kwargs['url'] = self.SINGLE_URL + kwargs['lost_order_id'] = self.sanitize_id(lost_order_id) + return self.transport.get().request(**kwargs) + + +class LostOrdersManager(Item): + + SCOPE = 'manage_lost_orders' + + DELETE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s/decline') + CREATE_URL = Item.prepare_url('lost_orders/create') + + CREATE_FIELDS = { + 'advcampaign': lambda x: Item.sanitize_integer_value(x, 'advcampaign'), + 'website': lambda x: Item.sanitize_integer_value(x, 'website'), + 'order_id': lambda x: Item.sanitize_string_value(x, 'order_id'), + 'order_date': lambda x: Item.sanitize_string_value(x, 'order_date'), + 'order_price': lambda x: Item.sanitize_float_value(x, 'order_price'), + 'comment': lambda x: Item.sanitize_string_value(x, 'comment'), + } + + def delete(self, lost_order_id): + data = { + 'url': self.DELETE_URL, + 'lost_order_id': self.sanitize_id(lost_order_id), + } + return self.transport.set_method('DELETE').request(**data) + + def create(self, attachment, **kwargs): + data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) + kwargs['url'] = self.CREATE_URL + files = { + 'attachment': attachment, + } + return self.transport.post().set_data(data).set_files(files).request(**kwargs) From fd542cefea2df5abedc87352984217863ca38158 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 20:28:51 +0300 Subject: [PATCH 19/42] debug argument for get_oauth_client --- pyadmitad/api.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyadmitad/api.py b/pyadmitad/api.py index b2b7c0b..75976c3 100644 --- a/pyadmitad/api.py +++ b/pyadmitad/api.py @@ -21,5 +21,5 @@ def get_oauth_client_client( auth['access_token'], user_agent=user_agent, debug=debug) -def get_oauth_client(access_token, user_agent=None): - return get_authorizing_client(access_token, user_agent=user_agent) +def get_oauth_client(access_token, user_agent=None, debug=False): + return get_authorizing_client(access_token, user_agent=user_agent, debug=debug) From b146c4db47bf858aec17c9d5f3a77233b7c75e73 Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Sun, 17 Jul 2016 20:45:22 +0300 Subject: [PATCH 20/42] fix tests --- pyadmitad/tests/base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pyadmitad/tests/base.py b/pyadmitad/tests/base.py index 28de23d..431d25d 100644 --- a/pyadmitad/tests/base.py +++ b/pyadmitad/tests/base.py @@ -35,6 +35,7 @@ def set_mocker(self, url, **kwargs): 'data': self.prepare_data(**kwargs), 'headers': build_headers(access_token), 'method': BaseTestCase.prepare_method(**kwargs), - 'debug': False + 'debug': False, + 'files': None, } obj.api_request(url, **kwargs) From 9fff5757c6ad1e3917f220236b1caab03905b653 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 3 Oct 2016 16:44:52 +0300 Subject: [PATCH 21/42] update and add new items, remove deprecated code --- README.md | 321 +++++++++++- pyadmitad/api.py | 23 +- pyadmitad/client.py | 12 +- pyadmitad/constants.py | 18 +- pyadmitad/exceptions.py | 10 + pyadmitad/items/__init__.py | 20 +- pyadmitad/items/announcements.py | 36 +- pyadmitad/items/arecords.py | 41 
++ pyadmitad/items/auxiliary.py | 179 ++++--- pyadmitad/items/banners.py | 79 ++- pyadmitad/items/base.py | 158 +++--- pyadmitad/items/broken_links.py | 88 ++++ pyadmitad/items/campaigns.py | 127 +++-- pyadmitad/items/coupons.py | 175 ++++--- pyadmitad/items/deeplinks.py | 29 +- pyadmitad/items/landings.py | 43 +- pyadmitad/items/links.py | 16 +- pyadmitad/items/lost_orders.py | 63 ++- pyadmitad/items/me.py | 65 ++- pyadmitad/items/news.py | 34 +- pyadmitad/items/optcodes.py | 74 ++- pyadmitad/items/payments.py | 122 ++++- pyadmitad/items/referrals.py | 55 +- pyadmitad/items/retag.py | 150 ++++++ pyadmitad/items/statistics.py | 279 ++++++---- pyadmitad/items/tickets.py | 104 ++++ pyadmitad/items/websites.py | 175 +++++-- pyadmitad/tests/base.py | 47 +- pyadmitad/tests/data/image.png | Bin 0 -> 7605 bytes pyadmitad/tests/test_announcements.py | 101 ++-- pyadmitad/tests/test_arecords.py | 72 +++ pyadmitad/tests/test_auxiliary.py | 714 ++++++++++---------------- pyadmitad/tests/test_banners.py | 117 ++--- pyadmitad/tests/test_base.py | 166 ++++++ pyadmitad/tests/test_broken_links.py | 73 +++ pyadmitad/tests/test_campaigns.py | 363 +++---------- pyadmitad/tests/test_coupons.py | 303 ++++------- pyadmitad/tests/test_deeplinks.py | 32 ++ pyadmitad/tests/test_landings.py | 52 ++ pyadmitad/tests/test_links.py | 35 ++ pyadmitad/tests/test_lost_orders.py | 84 +++ pyadmitad/tests/test_me.py | 171 ++++-- pyadmitad/tests/test_news.py | 91 ++++ pyadmitad/tests/test_optcodes.py | 137 +++++ pyadmitad/tests/test_payments.py | 177 +++---- pyadmitad/tests/test_referrals.py | 130 +++-- pyadmitad/tests/test_retag.py | 130 +++++ pyadmitad/tests/test_statistics.py | 680 ++++++++++-------------- pyadmitad/tests/test_tickets.py | 89 ++++ pyadmitad/tests/test_transport.py | 300 +++++++++++ pyadmitad/tests/test_websites.py | 452 ++++++++-------- pyadmitad/transport.py | 448 +++++----------- setup.py | 15 +- 53 files changed, 4717 insertions(+), 2758 deletions(-) create mode 100644 pyadmitad/items/arecords.py create mode 100644 pyadmitad/items/broken_links.py create mode 100644 pyadmitad/items/retag.py create mode 100644 pyadmitad/items/tickets.py create mode 100644 pyadmitad/tests/data/image.png create mode 100644 pyadmitad/tests/test_arecords.py create mode 100644 pyadmitad/tests/test_base.py create mode 100644 pyadmitad/tests/test_broken_links.py create mode 100644 pyadmitad/tests/test_deeplinks.py create mode 100644 pyadmitad/tests/test_landings.py create mode 100644 pyadmitad/tests/test_links.py create mode 100644 pyadmitad/tests/test_lost_orders.py create mode 100644 pyadmitad/tests/test_news.py create mode 100644 pyadmitad/tests/test_optcodes.py create mode 100644 pyadmitad/tests/test_retag.py create mode 100644 pyadmitad/tests/test_tickets.py create mode 100644 pyadmitad/tests/test_transport.py diff --git a/README.md b/README.md index a444393..044662d 100644 --- a/README.md +++ b/README.md @@ -13,11 +13,11 @@ Example ```python from pyadmitad import api -client_id = "" -client_secret = "" -scope = "private_data" +client_id = "[client_id]" +client_secret = "[client_secret]" +scope = ' '.join(set([client.Me.SCOPE])) -client = api.get_oauth_password_client( +client = api.get_oauth_client_client( client_id, client_secret, scope @@ -36,227 +36,294 @@ API Items ### Me ### +```python res = client.Me.get() +``` ### Balance ### +```python res = client.Balance.get() + res = client.Balance.get(extended=True) +``` + + +### PaymentsSettings ### + +```python + res = client.PaymentsSettings.get() + res = 
client.PaymentsSettings.get(currency='USD') +``` ### Types of websites ### +```python res = client.WebsiteTypes.get() res = client.WebsiteTypes.get(limit=2, offset=1) +``` ### Regions of websites ### +```python res = client.WebsiteRegions.get() res = client.WebsiteRegions.get(limit=2, offset=1) +``` ### Languages ### +```python res = client.SystemLanguages.get() res = client.SystemLanguages.get(limit=2, offset=1) res = client.SystemLanguages.getOne(code='ru') +``` ### Currencies ### +```python res = client.SystemCurrencies.get() res = client.SystemCurrencies.get(limit=2, offset=1) +``` ### Advertising services ### +```python res = client.AdvertiserServices.get() res = client.AdvertiserServices.get(limit=2, offset=1) - res = client.AdvertiserServices.getOne(_id=2) res = client.AdvertiserServices.getOne(1) res = client.AdvertiserServices.getForKind(kind='website') - res = client.AdvertiserServices.getForKind('website') - res = client.AdvertiserServices.getForKindOne(_id=2, kind='website') - res = client.AdvertiserServices.getForKindOne(2, 'website') + res = client.AdvertiserServices.getForKindOne(2, kind='website') +``` ### Categories of advertising campaigns ### +```python res = client.CampaignCategories.get() + res = client.CampaignCategories.get(campaign=10, language='en') res = client.CampaignCategories.get(limit=2, offset=1) - res = client.CampaignCategories.getOne(_id=2) res = client.CampaignCategories.getOne(2) +``` ### Coupons ## ###### List of coupons ###### +```python res = client.Coupons.get() - res = client.Coupons.get(order_by=date_start) - res = client.Coupons.get(order_by=-date_end) + res = client.Coupons.get(order_by=['date_start', '-name']) + res = client.Coupons.get(order_by='-date_end') res = client.Coupons.get(campaign=1, category=2) - res = client.Coupons.get(campaign=[1, 2], category=2) - res = client.Coupons.getOne(_id=2) + res = client.Coupons.get(campaign=[1, 2], category=[2, 3]) res = client.Coupons.getOne(2) +``` ###### List of coupons for a website ###### - res = client.CouponsForWebsite.get(_id=2) +```python res = client.CouponsForWebsite.get(2) res = client.CouponsForWebsite.get(2, order_by=date_start) res = client.CouponsForWebsite.get(2, campaign=1, category=2) res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) - res = client.CouponsForWebsite.getOne(_id=2, c_id=1) res = client.CouponsForWebsite.getOne(2, 1) +``` +###### List of coupons categories ###### + +```python + res = client.CouponsCategories.get() + res = client.CouponsCategories.get(limit=10, offset=10) + res = client.CouponsCategories.getOne(2) +``` ### Websites ### ##### List of websites ##### - res = client.Websites.get() +```python + res = client.Websites.get(limit=10) res = client.Websites.get(status='new', campaign_status='active') - res = client.Websites.getOne(_id=2) res = client.Websites.getOne(2) +``` ##### Manage websites ##### ###### Create website ###### +```python res = client.WebsitesManage.create( - regions=['RU'], - atnd_hits='20', - atnd_visits='10', name='website1', + kind='website', language='ru', + adservice=2, site_url='http://site.com', description='description', - categories=['1', '2'], - kind='website' + categories=[1, 2], + regions=['RU'], + atnd_hits=20, + atnd_visits=10, + mailing_targeting=False ) +``` ###### Update website ###### +```python res = client.WebsitesManage.update(50, name='test', language='de') +``` ###### Verify website ###### +```python res = client.WebsitesManage.verify(50) +``` ###### Delete website ###### +```python res = 
client.WebsitesManage.delete(50) +``` ### Statistics ### ###### Statistics by websites ###### +```python res = client.StatisticWebsites.get(website=1, campaign=1) res = client.StatisticWebsites.get(subid="ADS778") res = client.StatisticWebsites.get(limit=2) res = client.StatisticWebsites.get(date_start='01.01.2013') +``` ###### Statistics by campaigns ###### +```python res = client.StatisticCampaigns.get() res = client.StatisticCampaigns.get(website=1, campaign=1) res = client.StatisticCampaigns.get(subid="ADS778") res = client.StatisticCampaigns.get(limit=2) res = client.StatisticCampaigns.get(date_start='01.01.2013') +``` ###### Statistics by days ###### +```python res = client.StatisticDays.get() res = client.StatisticDays.get(website=1, campaign=1) res = client.StatisticDays.get(subid="ADS778") res = client.StatisticDays.get(limit=2) res = client.StatisticDays.get(date_start='01.01.2013') +``` ###### Statistics by months ###### +```python res = client.StatisticMonths.get() res = client.StatisticMonths.get(website=1, campaign=1) res = client.StatisticMonths.get(subid="ADS778") res = client.StatisticMonths.get(limit=2) res = client.StatisticMonths.get(date_start='01.01.2013') +``` ###### Statistics by actions ###### +```python res = client.StatisticActions.get() res = client.StatisticActions.get(date_start='01.01.2013') res = client.StatisticActions.get(website=1, campaign=1) res = client.StatisticActions.get(subid="ADS778") res = client.StatisticActions.get(subid2="ADS778") res = client.StatisticActions.get(limit=2) +``` ###### Statistics by sub-ids ###### +```python res = client.StatisticSubIds.get() res = client.StatisticSubIds.get(date_start='01.01.2013') res = client.StatisticSubIds.get(subid="ADS778") res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) res = client.StatisticSubIds.get(limit=2) +``` ###### Statistics by sources ###### +```python res = client.StatisticSources.get() res = client.StatisticSources.get(date_start='01.01.2013') res = client.StatisticSources.get(limit=2) +``` ###### Statistics by keywords ###### +```python res = client.StatisticKeywords.get() res = client.StatisticKeywords.get(date_start='01.01.2013') res = client.StatisticKeywords.get(limit=2) +``` ### Referrals ### +```python res = client.Referrals.get() res = client.Referrals.get(limit=2) - res = client.Referrals.getOne(_id=2) res = client.Referrals.getOne(2) +``` ### Banners ### ###### List of banners ###### - res = client.Banners.get(_id=2) +```python res = client.Banners.get(2) - res = client.Banners.get(2, limit=2) + res = client.Banners.get(2, mobile_content=False, limit=2) +``` ###### List of banners for a website ###### +```python res = client.BannersForWebsite.get(_id=2, w_id=3) res = client.BannersForWebsite.get(2, 3) - res = client.BannersForWebsite.get(2, 3, limit=5) + res = client.BannersForWebsite.get(2, 3, uri_scheme='https', limit=5) +``` ### Campaigns ### ###### List of campaigns ###### +```python res = client.Campaigns.get() res = client.Campaigns.get(limit=2) res = client.Campaigns.getOne(2) +``` ###### List of campaigns for a website ###### +```python res = client.CampaignsForWebsite.get(22) res = client.CampaignsForWebsite.get(limit=2) res = client.CampaignsForWebsite.getOne(6, 22) +``` ###### Manage campaigns ###### +```python res = client.CampaignsManage.connect(6, 22) res = client.CampaignsManage.connect(c_id=6, w_id=22) res = client.CampaignsManage.disconnect(6, 22) res = client.CampaignsManage.disconnect(c_id=6, w_id=22) +``` ### Payments ### @@ -264,16 +331,214 
@@ API Items ###### List of payment ###### +```python res = client.Payments.get() - res = client.Payments.get(limit=2) + res = client.Payments.get(limit=2, has_statement=True) res = client.Payments.getOne(2) +``` + +###### Payments statement ###### + +```python + res = client.PaymentsStatement.get(12) + res = client.PaymentsStatement.get(12, detailed=True) +``` ###### Manage payments ###### +```python res = client.PaymentsManage.create('USD') res = client.PaymentsManage.confirm(71) res = client.PaymentsManage.delete(71) +``` + +### Broken links ### + +###### List of broken links ###### + +```python + res = client.BrokenLinks.get() + res = client.BrokenLinks.get(website=[10, 20], date_start='01.01.2010') + res = client.BrokenLinks.getOne(10) +``` + +###### Manage broken links ###### +```python + res = client.ManageBrokenLinks.resolve(10) + res = client.ManageBrokenLinks.resolve([10, 11, 12]) +``` + +### Announcements ### + +###### List of annouuncements ###### + +```python + res = client.Announcements.get() + res = client.Announcements.getOne(10) +``` + +### News ### + +###### List of news ###### + +```python + res = client.News.get() + res = client.News.get(limit=10, offset=20) + res = client.News.getOne(10) +``` + +### Links validator ### + +###### Validate link ###### + +```python + res = client.LinksValidator.get('https://admitad.com/some_url/') +``` + +### Landings ### + +###### List of landings ###### + +```python + res = client.Landings.get(10) + res = client.Landings.get(10, limit=100) +``` + +###### List of landings for website ###### + +```python + res = client.LandingsForWebsite.get(10, 22) + res = client.LandingsForWebsite.get(10, 22, limit=100) +``` + +### Deeplinks ### + +###### Create deeplink ###### + +```python + res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') +``` + +### Referrals ### + +###### List of referrals ###### + +```python + res = client.Referrals.get() + res = client.Referrals.get(date_start='01.01.2010', date_end=datetime.today()) + res = client.Referrals.getOne(181) +``` + +### Optcodes ### + +###### List of opt-codes ###### + +```python + res = client.OptCodes.get() + res = client.OptCodes.get(campaign=100, order_by=['method', 'desc_mode') + res = client.OptCodes.getOne(11) +``` + +###### Offer status opt-codes manager ###### + +```python + res = client.OfferStatusOptCodesManager.create( + website=10, campaign=100, desc_mode=0, method=l, + url='https://admitad.com/foobarbaz/' + ) + res = client.OfferStatusOptCodesManager.update( + desc_mode=1, method=1 + ) +``` + +###### Action opt-codes manager ###### + +```python + res = client.ActionOptCodesManager.create( + website=10, campaign=100, desc_mode=0, method=l, + url='https://admitad.com/foobarbaz/', + action_type=0, status=1 + ) + res = client.ActionOptCodesManager.update( + desc_mode=1, method=1, action_type=1, status=2 + ) +``` + +### Lost orders ### + +###### List of lost orders ###### + +```python + res = client.LostOrders.get() + res = client.LostOrders.get(limit=20, offset=0) + res = client.LostOrders.getOne(76) +``` + +###### Lost orders manager ###### + +```python + res = client.LostOrdersManager.create( + attachments=['/home/user/f.png', '/home/user/s.png'], + advcampaign=100, website=10, + order_id='039NRUHFJEW', order_date='12.08.2016', order_price=345.77, + comment='some comment' + ) + res = client.LostOrdersManager.delete(77) +``` + +### Arecords ### + +###### List of arecords ###### + +```python + res = client.Arecords.get() + res = 
client.Arecords.get(limit=50) + res = client.Arecords.getForWebsite(10) +``` + +### Retag ### + +###### List of retag ###### + +```python + res = client.Retag.get() + res = client.Retag.get(website=10, active=False, limit=50) + res = client.Retag.getOne(54) + res = client.Retag.getLevelsForWebsite(10) + res = client.Retag.getLevelsForCampaign(100) +``` + +###### Retag manager ###### + +```python + res = client.RetagManager.create( + website=10, level=22, active=False, + script='some js script', comment='some comment' + ) + res = client.RetagManager.update(16, level=10, active=True) + res = client.RetagManager.delete(88) +``` + +### Tickets ### + +###### List of tickets ###### + +```python + res = client.Tickets.get() + res = client.Tickets.get(date_start='01.01.2016', status=0) + res = client.Tickets.getOne(50) +``` + +###### Ticket manager ###### + +```python + res = client.TicketsManager.create( + subject='subject', text='some text', + campaign=100, category=27, priority=0, + ) + res = client.TicketsManager.comment(12, text='some comment') +``` Notes ------ @@ -281,5 +546,7 @@ Notes It is possible to override the default response handler by passing handler as a keyword argument to a client function call. For example: +```python func = lambda x: (x, x) result = client.Me.get(handler=func) +``` diff --git a/pyadmitad/api.py b/pyadmitad/api.py index 75976c3..58ecf58 100644 --- a/pyadmitad/api.py +++ b/pyadmitad/api.py @@ -1,25 +1,26 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad import client, transport -def get_authorizing_client(access_token, user_agent=None, debug=False): +def get_oauth_client_token(access_token, user_agent=None, debug=False): """ Creates a client using an access token. + """ - http_transport = transport.HttpTransport( - access_token, user_agent=user_agent, debug=debug) + http_transport = transport.HttpTransport(access_token, user_agent=user_agent, debug=debug) return client.Client(http_transport) -def get_oauth_client_client( - client_id, client_secret, scopes, user_agent=None, debug=False): +def get_oauth_client_client(client_id, client_secret, scopes, user_agent=None, debug=False): + """ + Creates a client using a client_id and client_secret. 
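    Illustrative sketch (editorial addition, not part of the original patch);
    the credential strings and the `public_data` scope below are placeholders:

        client = get_oauth_client_client('client_id', 'client_secret', 'public_data')
        print(client.WebsiteTypes.get(limit=2))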
+ + """ auth = transport.oauth_client_authorization({ 'client_id': client_id, 'client_secret': client_secret, 'scopes': scopes }) - return get_authorizing_client( - auth['access_token'], user_agent=user_agent, debug=debug) - - -def get_oauth_client(access_token, user_agent=None, debug=False): - return get_authorizing_client(access_token, user_agent=user_agent, debug=debug) + return get_oauth_client_token(auth['access_token'], user_agent=user_agent, debug=debug) diff --git a/pyadmitad/client.py b/pyadmitad/client.py index c2c2106..f6277d2 100644 --- a/pyadmitad/client.py +++ b/pyadmitad/client.py @@ -1,13 +1,7 @@ -from pyadmitad import items - - -class FailedRequest(Exception): +# coding: utf-8 +from __future__ import unicode_literals - def __init__(self, error): - self.error = error - - def __str__(self): - return repr(self.error) +from pyadmitad import items class Client(object): diff --git a/pyadmitad/constants.py b/pyadmitad/constants.py index 8e0cd64..0953fd7 100644 --- a/pyadmitad/constants.py +++ b/pyadmitad/constants.py @@ -1,13 +1,21 @@ -CURRENCIES = ('USD', 'RUB', 'EUR') +# coding: utf-8 +from __future__ import unicode_literals + # API date-format -DATE_FORMAT = "%d.%m.%Y" -LONG_DATE_FORMAT = "%d.%m.%Y %H:%M:%S" +DATE_FORMAT = '%d.%m.%Y' +LONG_DATE_FORMAT = '%d.%m.%Y %H:%M:%S' + +SUPPORTED_LANGUAGES = ('ru', 'en', 'de', 'pl', 'es', 'tr') # default values DEFAULT_REQUEST_TIMEOUT = 60 DEFAULT_LANGUAGE = 'ru' -MAX_PAGINATION_LIMIT = 200 -SUB_ID_MAX_LENGTH = 50 +DEFAULT_PAGINATION_LIMIT = 20 +DEFAULT_PAGINATION_OFFSET = 0 + +# constants +MAX_PAGINATION_LIMIT = 500 +MAX_SUB_ID_LENGTH = 250 # urls BASE_URL = 'https://api.admitad.com/' diff --git a/pyadmitad/exceptions.py b/pyadmitad/exceptions.py index b1de13c..91e4c14 100644 --- a/pyadmitad/exceptions.py +++ b/pyadmitad/exceptions.py @@ -1,8 +1,12 @@ +# coding: utf-8 +from __future__ import unicode_literals class HttpException(Exception): def __init__(self, status, message, content): + super(HttpException, self).__init__() + self.status = status self.message = message self.content = content @@ -19,6 +23,8 @@ def __repr__(self): class ConnectionException(Exception): def __init__(self, content): + super(ConnectionException, self).__init__() + self.content = content def __str__(self): @@ -31,6 +37,8 @@ def __repr__(self): class JsonException(Exception): def __init__(self, content): + super(JsonException, self).__init__() + self.content = content def __str__(self): @@ -43,6 +51,8 @@ def __repr__(self): class ApiException(Exception): def __init__(self, content): + super(ApiException, self).__init__() + self.content = content def __str__(self): diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py index b515901..97b31ff 100644 --- a/pyadmitad/items/__init__.py +++ b/pyadmitad/items/__init__.py @@ -1,16 +1,20 @@ from pyadmitad.items.me import * +from pyadmitad.items.websites import * from pyadmitad.items.auxiliary import * +from pyadmitad.items.announcements import * +from pyadmitad.items.news import * +from pyadmitad.items.links import * +from pyadmitad.items.landings import * +from pyadmitad.items.deeplinks import * +from pyadmitad.items.referrals import * +from pyadmitad.items.payments import * from pyadmitad.items.coupons import * -from pyadmitad.items.websites import * from pyadmitad.items.statistics import * -from pyadmitad.items.referrals import * from pyadmitad.items.banners import * from pyadmitad.items.campaigns import * -from pyadmitad.items.announcements import * -from pyadmitad.items.payments import * -from 
pyadmitad.items.landings import * from pyadmitad.items.optcodes import * -from pyadmitad.items.news import * -from pyadmitad.items.deeplinks import * -from pyadmitad.items.links import * from pyadmitad.items.lost_orders import * +from pyadmitad.items.arecords import * +from pyadmitad.items.retag import * +from pyadmitad.items.broken_links import * +from pyadmitad.items.tickets import * diff --git a/pyadmitad/items/announcements.py b/pyadmitad/items/announcements.py index 847eeaf..405479a 100644 --- a/pyadmitad/items/announcements.py +++ b/pyadmitad/items/announcements.py @@ -1,33 +1,43 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item -__all__ = ( + +__all__ = [ 'Announcements', -) +] class Announcements(Item): """ List of announcements - Required scope - "announcements" """ + + SCOPE = 'announcements' + URL = Item.prepare_url('announcements') - SINGLE_URL = Item.prepare_url('announcements/%(id)s') + SINGLE_URL = Item.prepare_url('announcements/%(announcement_id)s') def get(self, **kwargs): """ - res = client.Announcements.get() - res = client.Announcements.get(limit=1, offset=2) + Args: + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) def getOne(self, _id, **kwargs): """ - Here _id is an announcement id + Args: + _id (int) - res = client.Announcements.getOne(2) """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'announcement_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) diff --git a/pyadmitad/items/arecords.py b/pyadmitad/items/arecords.py new file mode 100644 index 0000000..e54280c --- /dev/null +++ b/pyadmitad/items/arecords.py @@ -0,0 +1,41 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from pyadmitad.items.base import Item + + +__all__ = [ + 'Arecords' +] + + +class Arecords(Item): + + SCOPE = 'arecords' + + URL = Item.prepare_url('arecords') + FOR_WEBSITE_URL = Item.prepare_url('arecords/%(website_id)s') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + + """ + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getForWebsite(self, website_id, **kwargs): + """ + Args: + website_id (int) + + """ + + request_data = { + 'url': self.FOR_WEBSITE_URL, + 'website_id': Item.sanitize_id(website_id) + } + + return self.transport.get().request(**request_data) diff --git a/pyadmitad/items/auxiliary.py b/pyadmitad/items/auxiliary.py index 215199d..253cbf4 100644 --- a/pyadmitad/items/auxiliary.py +++ b/pyadmitad/items/auxiliary.py @@ -1,3 +1,6 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item @@ -15,90 +18,109 @@ class WebsiteTypes(Item): """ List of websites types - Required scope - "public_data" """ + SCOPE = 'public_data' + URL = Item.prepare_url('websites/kinds') def get(self, **kwargs): """ - res = client.WebsiteTypes.get() - res = client.WebsiteTypes.get(limit=2, offset=1) - res = client.WebsiteTypes.get(limit=2, offset=1, language='ru') + Args: + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) class 
WebsiteRegions(Item): """ List of websites regions - Required scope - "public_data" """ + SCOPE = 'public_data' + URL = Item.prepare_url('websites/regions') def get(self, **kwargs): """ - res = client.WebsiteRegions.get() - res = client.WebsiteRegions.get(limit=2, offset=1) - res = client.WebsiteRegions.get(limit=2, offset=1, language='ru') + Args: + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) class SystemLanguages(Item): """ List of system languages - Required scope - "public_data" """ + SCOPE = 'public_data' + URL = Item.prepare_url('languages') SINGLE_URL = Item.prepare_url('languages/%(code)s') def get(self, **kwargs): """ - res = client.SystemLanguages.get() - res = client.SystemLanguages.get(limit=2, offset=1) + Args: + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) def getOne(self, code='ru'): """ - res = client.SystemLanguages.getOne(code='ru') + Args: + code (str) + """ - return self.transport.set_method('GET').request(url=self.SINGLE_URL, code=code) + + request_data = { + 'url': self.SINGLE_URL, + 'code': Item.sanitize_string_value(code, 'code', 2, 2, False) + } + + return self.transport.get().request(**request_data) class SystemCurrencies(Item): """ List of system currencies - Required scope - "public_data" """ + SCOPE = 'public_data' + URL = Item.prepare_url('currencies') def get(self, **kwargs): """ - res = client.SystemCurrencies.get() - res = client.SystemCurrencies.get(limit=2, offset=1) + Args: + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) class AdvertiserServices(Item): """ List of advertiser services - Required scope - "public_data" """ + SCOPE = 'public_data' + URL = Item.prepare_url('adservices') SINGLE_URL = Item.prepare_url('adservices/%(id)s') KIND_URL = Item.prepare_url('adservices/kind/%(kind)s') @@ -106,52 +128,66 @@ class AdvertiserServices(Item): def get(self, **kwargs): """ - res = client.AdvertiserServices.get() - res = client.AdvertiserServices.get(limit=2, offset=1) + Args: + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) def getOne(self, _id, **kwargs): """ - res = client.AdvertiserServices.getOne(_id=2) - res = client.AdvertiserServices.getOne(1) + Args: + _id (int) + """ - kwargs['id'] = self.sanitize_id(_id) - kwargs['url'] = self.SINGLE_URL - return self.transport.set_method('GET').request(**kwargs) + data = { + 'url': self.SINGLE_URL, + 'id': Item.sanitize_id(_id), + } + + return self.transport.get().request(**data) def getForKind(self, kind=None, **kwargs): """ - Returns advertiser services for website types + Args: + kind (str) + limit (int) + offset (int) - res = client.AdvertiserServices.getForKind(kind='website') - res = client.AdvertiserServices.getForKind('website') """ - kwargs['kind'] = self.sanitize_non_blank_value(kind, 'kind') - kwargs['url'] = self.KIND_URL - return 
self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + request_data = { + 'url': self.KIND_URL, + 'kind': self.sanitize_non_blank_value(kind, 'kind'), + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) def getForKindOne(self, _id, kind, **kwargs): """ - Returns advertiser service for website types + Args: + _id (int) + kind (str) - res = client.AdvertiserServices.getForKindOne(_id=2, kind='website') - res = client.AdvertiserServices.getForKindOne(2, 'website') """ - kwargs['kind'] = self.sanitize_non_blank_value(kind, 'kind') - kwargs['id'] = self.sanitize_id(_id) - kwargs['url'] = self.KIND_SINGLE_URL - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.KIND_SINGLE_URL, + 'id': self.sanitize_id(_id), + 'kind': self.sanitize_non_blank_value(kind, 'kind'), + } + + return self.transport.get().request(**request_data) class CampaignCategories(Item): """ List of campaigns categories - Required scope - "public_data" """ + SCOPE = 'public_data' + ORDERING = ('name',) URL = Item.prepare_url('categories') @@ -159,19 +195,42 @@ class CampaignCategories(Item): def get(self, **kwargs): """ - res = client.CampaignCategories.get() - res = client.CampaignCategories.get(limit=2, offset=1) - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_ordering(**kwargs).request(**kwargs) + Args: + campaign (list of int) + language (str) + order_by (str) + limit (int) + offset (int) + + """ + ordering = { + 'order_by': kwargs.get('order_by', None), + 'available': self.ORDERING + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', True), + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ - res = client.CampaignCategories.getOne(_id=2) - res = client.CampaignCategories.getOne(2) + Args: + _id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) diff --git a/pyadmitad/items/banners.py b/pyadmitad/items/banners.py index 5755819..82b55ce 100644 --- a/pyadmitad/items/banners.py +++ b/pyadmitad/items/banners.py @@ -1,54 +1,95 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item -__all__ = ( +__all__ = [ 'Banners', 'BannersForWebsite', -) +] class Banners(Item): """ List of banners - Required scope - "banners" """ - URL = Item.prepare_url('banners/%(id)s') + SCOPE = 'banners' + + URL = Item.prepare_url('banners/%(campaign_id)s') def get(self, _id, **kwargs): """ Here _id is an id of advertising campaign - res = client.Banners.get(_id=2) - res = client.Banners.get(2) - res = client.Banners.get(2, limit=2) + Args: + _id (int) + mobile_content (bool) + limit (int) + offset(int) """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 
'mobile_content': lambda x: Item.sanitize_bool_value(x, blank=True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(**request_data) class BannersForWebsite(Item): """ List of banners for the website - Required scope - "banners_for_website" """ - URL = Item.prepare_url('banners/%(id)s/website/%(w_id)s') + SCOPE = 'banners_for_website' + + URL = Item.prepare_url('banners/%(campaign_id)s/website/%(website_id)s') def get(self, _id, w_id, **kwargs): """ Here _id is an id of advertising campaign and w_id is a id of website - res = client.BannersForWebsite.get(_id=2, w_id=3) - res = client.BannersForWebsite.get(2, 3) - res = client.BannersForWebsite.get(2, 3, limit=5) + Args: + _id (int) + w_id (int) + mobile_content (bool) + landing (int) + uri_scheme (str) + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['w_id'] = self.sanitize_id(w_id) - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(_id), + 'website_id': Item.sanitize_id(w_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'mobile_content': lambda x: Item.sanitize_bool_value(x, blank=True), + 'landing': lambda x: Item.sanitize_integer_value(x, 'landing', blank=True), + 'uri_scheme': lambda x: Item.sanitize_string_value(x, 'uri_scheme', blank=True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(**request_data) diff --git a/pyadmitad/items/base.py b/pyadmitad/items/base.py index abd2a05..f697f9f 100644 --- a/pyadmitad/items/base.py +++ b/pyadmitad/items/base.py @@ -1,7 +1,14 @@ -from copy import deepcopy +# coding: utf-8 +from __future__ import unicode_literals + +from future import standard_library +standard_library.install_aliases() + +from future.builtins import int, str from datetime import datetime, date -from pyadmitad.constants import DATE_FORMAT, BASE_URL,\ - CURRENCIES, LONG_DATE_FORMAT +from urllib.parse import urljoin + +from pyadmitad.constants import BASE_URL, DATE_FORMAT, LONG_DATE_FORMAT class Item(object): @@ -10,39 +17,35 @@ def __init__(self, transport): self.transport = transport self.transport.clean_data() - def sanitize_id(self, _id, name='_id'): - return self.sanitize_integer_value(_id, name) - @staticmethod def sanitize_fields(fields, **kwargs): - data = deepcopy(kwargs) - for field in fields: - data[field] = fields[field](data.get(field)) - return dict([(key, value) for (key, value) in data.items() if value]) + return {key: func(kwargs.get(key, None)) for (key, func) in fields.items()} + + @staticmethod + def sanitize_id(_id, name='_id'): + if _id == 0: + raise ValueError('Invalid value for `id`: %s' % (_id)) + return Item.sanitize_integer_value(_id, name, False) @staticmethod def sanitize_non_blank_value(value, name): - if not value: + if value in [[], {}, (), '', None]: raise ValueError("Invalid non-blank value '%s': %s" % (name, value)) return value @staticmethod - def sanitize_string_value( - value, name, max_length=None, min_length=None, blank=False): + def sanitize_string_value(value, name, max_length=None, min_length=None, blank=False): if not value: if not blank: - raise ValueError( - "Invalid string value '%s': %s. Cannot be blank." % - (name, value)) + raise ValueError("Invalid string value '%s': %s. Cannot be blank." 
% + (name, value)) return value if max_length and len(value) > max_length: - raise ValueError( - "Invalid string value '%s': %s. Max length: %s" % - (name, value, max_length)) + raise ValueError("Invalid string value '%s': %s. Max length: %s" % + (name, value, max_length)) if min_length and len(value) < min_length: - raise ValueError( - "Invalid string value '%s': %s. Min length: %s" % - (name, value, min_length)) + raise ValueError("Invalid string value '%s': %s. Min length: %s" % + (name, value, min_length)) return value @staticmethod @@ -51,84 +54,113 @@ def sanitize_integer_value(value, name, blank=False): if not blank: raise ValueError("Blank integer value '%s': %s" % (name, value)) return value - if type(value) == int: - return str(value) - elif type(value) == str: - if value.isdigit(): - return value + if isinstance(value, int): + return value + elif isinstance(value, str) and value.isdigit(): + return value raise ValueError("Invalid integer value '%s': %s" % (name, value)) @staticmethod def sanitize_float_value(value, name, blank=False): - if not value: + if value is None: if not blank: raise ValueError("Blank float value '%s': %s" % (name, value)) return value - if type(value) in (float, int): - return str(value) - elif type(value) == str: + if isinstance(value, (float, int)): + return value + elif isinstance(value, str): try: float(value) return value except ValueError: - raise ValueError("Invalid float value '%s': %s" % (name, value)) + pass raise ValueError("Invalid float value '%s': %s" % (name, value)) @staticmethod def sanitize_integer_array(values, name, blank=False): if not values: if not blank: - raise ValueError( - "Blank integer values '%s': %s" % (name, values)) + raise ValueError("Blank integer values '%s': %s" % (name, values)) return values + if not isinstance(values, (list, tuple, set)): + values = [values] return [Item.sanitize_integer_value(x, name, blank=blank) for x in values] @staticmethod - def sanitize_string_array( - values, name, max_length=None, min_length=None, blank=False): + def sanitize_string_array(values, name, max_length=None, min_length=None, blank=False): if not values: if not blank: - raise ValueError( - "Blank string values '%s': %s" % (name, values)) + raise ValueError("Blank string values '%s': %s" % (name, values)) return values - return [Item.sanitize_string_value( - x, name, max_length=max_length, min_length=min_length, blank=blank) - for x in values] + if not isinstance(values, (list, tuple, set)): + values = [values] + return [Item.sanitize_string_value(x, name, max_length=max_length, min_length=min_length, blank=blank) + for x in values] + + @staticmethod + def sanitize_bool_value(value, name='', blank=False): + if value is None: + if not blank: + raise ValueError("Blank bool value '%s': %s" % (name, value)) + return False + return str(bool(value)).lower() @staticmethod - def sanitize_currency(value, blank=True): + def sanitize_bool_integer_value(value, name='', blank=False): + if value is None: + if not blank: + raise ValueError("Blank bool value '%s': %s" % (name, value)) + return False + return 1 if value else 0 + + @staticmethod + def sanitize_currency_value(value, blank=True): if not value: if not blank: - raise ValueError( - "Blank currency value: %s" % value) + raise ValueError("Blank currency value: %s" % value) return value - if value not in CURRENCIES: - raise ValueError( - "Invalid currency value: %s" % value) - return value + if not len(value) == 3: + raise ValueError("Invalid currency value: %s" % value) + return 
value.upper() @staticmethod - def check_date(dt): - s = datetime.strptime(dt, DATE_FORMAT).date() - if s > date.today(): - s = date.today() - return s.strftime(DATE_FORMAT) + def sanitize_date(value, name, blank=False): + if value is None: + if not blank: + raise ValueError("Blank date value for '%s'" % name) + return None + if isinstance(value, datetime): + return value.date().strftime(DATE_FORMAT) + elif isinstance(value, date): + return value.strftime(DATE_FORMAT) + elif isinstance(value, str): + try: + datetime.strptime(value, DATE_FORMAT) + except ValueError: + raise ValueError("Invalid date: %s" % value) + return value + raise ValueError("Invalid date: %s" % value) @staticmethod - def check_long_date(dt): - s = datetime.strptime(dt, LONG_DATE_FORMAT) - if s > datetime.now(): - s = datetime.now() - return s.strftime(LONG_DATE_FORMAT) + def sanitize_long_date(value, name, blank=False): + if value is None: + if not blank: + raise ValueError("Blank date value for '%s'" % name) + return None + if isinstance(value, datetime): + return value.strftime(LONG_DATE_FORMAT) + elif isinstance(value, str): + try: + datetime.strptime(value, LONG_DATE_FORMAT) + except ValueError: + raise ValueError("Invalid date: %s" % value) + return value + raise ValueError("Invalid date: %s" % value) @staticmethod def prepare_url(path): - url = '%s%s' % (BASE_URL, path) + url = urljoin(BASE_URL, path) if not url.endswith('/'): url += '/' return url - - @staticmethod - def to_unicode(text): - return u'%s' % text diff --git a/pyadmitad/items/broken_links.py b/pyadmitad/items/broken_links.py new file mode 100644 index 0000000..7a578e4 --- /dev/null +++ b/pyadmitad/items/broken_links.py @@ -0,0 +1,88 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from pyadmitad.items.base import Item + + +__all__ = [ + 'BrokenLinks', + 'ManageBrokenLinks' +] + + +class BrokenLinks(Item): + + SCOPE = 'broken_links' + + URL = Item.prepare_url('broken_links') + SINGLE_URL = Item.prepare_url('broken_links/%(broken_link_id)s') + + def get(self, **kwargs): + """ + Args: + website (list of int) + campaign (list of int) + search (str) + reason (int) + date_start (date) + date_end (date) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'website': lambda x: Item.sanitize_integer_array(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', blank=True), + 'search': lambda x: Item.sanitize_string_value(x, 'search', blank=True), + 'reason': lambda x: Item.sanitize_integer_value(x, 'reason', blank=True), + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, broken_link_id): + """ + Args: + broken_link_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'broken_link_id': Item.sanitize_id(broken_link_id) + } + + return self.transport.get().request(**request_data) + + +class ManageBrokenLinks(Item): + + SCOPE = 'manage_broken_links' + + RESOLVE_URL = Item.prepare_url('broken_links/resolve') + + def resolve(self, broken_link_ids): + """ + Args: + broken_links_ids (list of int) + + """ + + filtering = { + 'filter_by': { + 'link_id': broken_link_ids + }, + 'available': { + 'link_id': lambda x: Item.sanitize_integer_array(x, 'link_id', blank=True) + } + } + + return self.transport.post() \ + 
.set_filtering(filtering) \ + .request(url=self.RESOLVE_URL) diff --git a/pyadmitad/items/campaigns.py b/pyadmitad/items/campaigns.py index 3b548f8..7efd859 100644 --- a/pyadmitad/items/campaigns.py +++ b/pyadmitad/items/campaigns.py @@ -1,109 +1,156 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item -__all__ = ( +__all__ = [ 'Campaigns', 'CampaignsForWebsite', 'CampaignsManage', -) +] class Campaigns(Item): """ List of advertising campaigns - Required scope - "advcampaigns" """ + + SCOPE = 'advcampaigns' + URL = Item.prepare_url('advcampaigns') - SINGLE_URL = Item.prepare_url('advcampaigns/%(id)s') + SINGLE_URL = Item.prepare_url('advcampaigns/%(campaign_id)s') def get(self, **kwargs): """ - res = client.Campaigns.get() - res = client.Campaigns.get(limit=2) + Args: + website (int) + has_tool (list of str) + limit (int) + offset (int) + language (str) """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + filtering = { + 'filter_by': kwargs, + 'available': { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'has_tool': lambda x: Item.sanitize_string_array(x, 'has_tool', blank=True), + 'language': lambda x: Item.sanitize_string_value(x, 'language', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ Here _id is an a campaign id - res = client.Campaigns.getOne(2) + Args: + _id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'campaign_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) class CampaignsForWebsite(Item): """ List of advertising campaigns for a website - Required scope - "advcampaigns_for_website" """ - URL = Item.prepare_url('advcampaigns/website/%(id)s') - SINGLE_URL = Item.prepare_url('advcampaigns/%(c_id)s/website/%(id)s') + + SCOPE = 'advcampaigns_for_website' + + URL = Item.prepare_url('advcampaigns/website/%(website_id)s') + SINGLE_URL = Item.prepare_url('advcampaigns/%(campaign_id)s/website/%(website_id)s') def get(self, _id, **kwargs): """ Here _id is a website id - res = client.CampaignsForWebsite.get(22) - res = client.CampaignsForWebsite.get(limit=2) + Args: + _id (int) + limit (int) + offset (int) """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + request_data = { + 'url': self.URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) def getOne(self, _id, c_id, **kwargs): """ Here _id is a website id and c_id is a campaign id - res = client.CampaignsForWebsite.getOne(6, 22) + Args: + _id (int) + c_id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'website_id': Item.sanitize_id(_id), + 'campaign_id': Item.sanitize_id(c_id) + } + + return self.transport.get().request(**request_data) class CampaignsManage(Item): """ Manage an advertising campaign - Required scope - "manage_advcampaigns" """ - CONNECT_URL = Item.prepare_url('advcampaigns/%(c_id)s/attach/%(w_id)s') - 
DISCONNECT_URL = Item.prepare_url('advcampaigns/%(c_id)s/detach/%(w_id)s') - def _request(self, c_id, w_id, **kwargs): - kwargs['c_id'] = self.sanitize_id(c_id) - kwargs['w_id'] = self.sanitize_id(w_id) - return self.transport.set_method('POST').request(**kwargs) + SCOPE = 'manage_advcampaigns' + + CONNECT_URL = Item.prepare_url('advcampaigns/%(campaign_id)s/attach/%(website_id)s') + DISCONNECT_URL = Item.prepare_url('advcampaigns/%(campaign_id)s/detach/%(website_id)s') def connect(self, c_id, w_id, **kwargs): """ Connect an advertising campaign for a website Here w_id is a website id and c_id is a campaign id - res = client.CampaignsManage.connect(6, 22) - res = client.CampaignsManage.connect(c_id=6, w_id=22) + Args: + c_id (int) + w_id (int) """ - kwargs['url'] = self.CONNECT_URL - return self._request(c_id, w_id, **kwargs) + request_data = { + 'url': self.CONNECT_URL, + 'campaign_id': Item.sanitize_id(c_id), + 'website_id': Item.sanitize_id(w_id) + } + + return self.transport.post().request(**request_data) def disconnect(self, c_id, w_id, **kwargs): """ Disconnect an advertising campaign from a website Here w_id is a website id and c_id is a campaign id - res = client.CampaignsManage.disconnect(6, 22) - res = client.CampaignsManage.disconnect(c_id=6, w_id=22) + Args: + c_id (int) + w_id (int) """ - kwargs['url'] = self.DISCONNECT_URL - return self._request(c_id, w_id, **kwargs) + request_data = { + 'url': self.DISCONNECT_URL, + 'campaign_id': Item.sanitize_id(c_id), + 'website_id': Item.sanitize_id(w_id) + } + + return self.transport.post().request(**request_data) diff --git a/pyadmitad/items/coupons.py b/pyadmitad/items/coupons.py index 6c337ac..cfba38b 100644 --- a/pyadmitad/items/coupons.py +++ b/pyadmitad/items/coupons.py @@ -1,21 +1,24 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item -__all__ = ( +__all__ = [ 'Coupons', 'CouponsForWebsite', 'CouponsCategories', -) +] class CouponsBase(Item): ORDERING = ('name', 'date_start', 'date_end', 'rating',) FILTERING = { - 'campaign': int, - 'campaign_category': int, - 'category': int, - 'type': int + 'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', blank=True), + 'campaign_category': lambda x: Item.sanitize_integer_array(x, 'campaign_category', blank=True), + 'category': lambda x: Item.sanitize_integer_array(x, 'category', blank=True), + 'type': lambda x: Item.sanitize_string_value(x, 'type', blank=True), } @@ -23,99 +26,145 @@ class Coupons(CouponsBase): """ List of coupons - Required scope - "coupons" """ + SCOPE = 'coupons' + URL = Item.prepare_url('coupons') - SINGLE_URL = Item.prepare_url('coupons/%(id)s') + SINGLE_URL = Item.prepare_url('coupons/%(coupon_id)s') def get(self, **kwargs): """ - res = client.Coupons.get() - res = client.Coupons.get(order_by=date_start) - res = client.Coupons.get(order_by=-date_end) - res = client.Coupons.get(campaign=1, category=2) - - If you want to filter by many values of the same key: - on example - campaign=1, campaign=2: - - use campaign=[1, 2] - - res = client.Coupons.get(campaign=[1, 2], category=2) + Args: + campaign (list of int) + campaign_category (list of int) + category (list of int) + type (str) + limit (int) + offset (int) + order_by (str) """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.get().set_pagination(**kwargs).\ - set_ordering(**kwargs).set_filtering(**kwargs).request(**kwargs) + filtering = { + 'filter_by': kwargs, + 
'available': self.FILTERING + } + + ordering = { + 'order_by': kwargs.get('order_by', None), + 'available': self.ORDERING + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ - res = client.Coupons.getOne(_id=2) - res = client.Coupons.getOne(2) + Args: + _id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.get().request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'coupon_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) class CouponsForWebsite(CouponsBase): """ List of the website coupons - Required scope - "coupons_for_website" """ - URL = Item.prepare_url('coupons/website/%(id)s') - SINGLE_URL = Item.prepare_url('coupons/%(c_id)s/website/%(id)s') + SCOPE = 'coupons_for_website' + + URL = Item.prepare_url('coupons/website/%(website_id)s') + SINGLE_URL = Item.prepare_url('coupons/%(campaign_id)s/website/%(website_id)s') def get(self, _id, **kwargs): """ - Here id is a websites id - - res = client.CouponsForWebsite.get(_id=2) - res = client.CouponsForWebsite.get(2) - res = client.CouponsForWebsite.get(2, order_by=date_start) - res = client.CouponsForWebsite.get(2, campaign=1, category=2) - - If you want to filter by many values of the same key: - on example - campaign=1, campaign=2: - - use campaign=[1, 2] + Here _id is a websites id + + Args: + _id (int) + campaign (list of int) + campaign_category (list of int) + category (list of int) + type (str) + limit (int) + offset (int) + order_by (str) - res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['allowed_ordering'] = self.ORDERING - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.get().set_pagination(**kwargs).\ - set_ordering(**kwargs).set_filtering(**kwargs).request(**kwargs) + request_data = { + 'url': self.URL, + 'website_id': Item.sanitize_id(_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': self.FILTERING + } + + ordering = { + 'order_by': kwargs.get('order_by', None), + 'available': self.ORDERING + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(**request_data) def getOne(self, _id, c_id, **kwargs): """ - Here id is a websites id and c_id is a coupon id + Here _id is a websites id and c_id is a coupon id + + Args: + _id (int) + c_id (int) - res = client.CouponsForWebsite.getOne(_id=2, c_id=1) - res = client.CouponsForWebsite.getOne(2, 1) """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.get().request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'website_id': Item.sanitize_id(_id), + 'campaign_id': Item.sanitize_id(c_id) + } + + return self.transport.get().request(**request_data) class CouponsCategories(CouponsBase): + SCOPE = 'public_data' + URL = Item.prepare_url('coupons/categories') SINGLE_URL = Item.prepare_url('coupons/categories/%(coupon_category_id)s') def get(self, **kwargs): - kwargs['url'] = self.URL - return self.transport.get().set_pagination(**kwargs).request(**kwargs) + """ + Args: + limit (int) + offset (int) + + """ + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getOne(self, coupon_category_id): + """ + Args: + 
coupon_category_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'coupon_category_id': Item.sanitize_id(coupon_category_id) + } - def getOne(self, coupon_category_id, **kwargs): - kwargs['url'] = self.SINGLE_URL - kwargs['coupon_category_id'] = self.sanitize_id(coupon_category_id) - return self.transport.get().request(**kwargs) + return self.transport.get().request(**request_data) diff --git a/pyadmitad/items/deeplinks.py b/pyadmitad/items/deeplinks.py index 9196c03..0703593 100644 --- a/pyadmitad/items/deeplinks.py +++ b/pyadmitad/items/deeplinks.py @@ -1,3 +1,6 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item @@ -8,17 +11,31 @@ class DeeplinksManage(Item): + SCOPE = 'deeplink_generator' + CREATE_URL = Item.prepare_url('deeplink/%(website_id)s/advcampaign/%(campaign_id)s') CREATE_FIELDS = { 'ulp': lambda x: Item.sanitize_string_value(x, 'ulp'), - 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30, blank=True), + 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30), # todo: subid[1-4] } def create(self, website_id, campaign_id, **kwargs): - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - kwargs['website_id'] = self.sanitize_id(website_id) - kwargs['campaign_id'] = self.sanitize_id(campaign_id) - return self.transport.get().set_data(data).request(**kwargs) + """ + Args: + website_id (int) + campaign_id (int) + ulp (str) + subid (str) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + request_data = { + 'url': self.CREATE_URL, + 'website_id': Item.sanitize_id(website_id), + 'campaign_id': Item.sanitize_id(campaign_id), + } + + return self.transport.get().set_data(data).request(**request_data) diff --git a/pyadmitad/items/landings.py b/pyadmitad/items/landings.py index 712ac23..5e91f58 100644 --- a/pyadmitad/items/landings.py +++ b/pyadmitad/items/landings.py @@ -1,5 +1,9 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item + __all__ = [ 'Landings', 'LandingsForWebsite', @@ -8,20 +12,45 @@ class Landings(Item): + SCOPE = 'landings' + URL = Item.prepare_url('landings/%(campaign_id)s') def get(self, campaign_id, **kwargs): - kwargs['url'] = self.URL - kwargs['campaign_id'] = self.sanitize_id(campaign_id) - return self.transport.get().set_pagination(**kwargs).request(**kwargs) + """ + Args: + campaign_id (int) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(campaign_id), + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) class LandingsForWebsite(Item): + SCOPE = 'landings' + URL = Item.prepare_url('landings/%(campaign_id)s/website/%(website_id)s') def get(self, campaign_id, website_id, **kwargs): - kwargs['url'] = self.URL - kwargs['campaign_id'] = self.sanitize_id(campaign_id) - kwargs['website_id'] = self.sanitize_id(website_id) - return self.transport.get().set_pagination(**kwargs).request(**kwargs) + """ + Args: + campaign_id (int) + website_id (int) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(campaign_id), + 'website_id': Item.sanitize_id(website_id), + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) diff --git a/pyadmitad/items/links.py b/pyadmitad/items/links.py index 7da1978..03ad3b4 100644 --- a/pyadmitad/items/links.py +++ b/pyadmitad/items/links.py @@ -1,8 +1,13 @@ +# 
coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item class LinksValidator(Item): + SCOPE = 'validate_links' + URL = Item.prepare_url('validate_links') GET_FIELDS = { @@ -10,6 +15,11 @@ class LinksValidator(Item): } def get(self, link, **kwargs): - data = self.sanitize_fields(self.GET_FIELDS, link=link) - kwargs['url'] = self.URL - return self.transport.get().set_data(data).request(**kwargs) + """ + Args: + link (str) + + """ + data = Item.sanitize_fields(self.GET_FIELDS, link=link) + + return self.transport.get().set_data(data).request(url=self.URL) diff --git a/pyadmitad/items/lost_orders.py b/pyadmitad/items/lost_orders.py index 34710ac..6cc70fe 100644 --- a/pyadmitad/items/lost_orders.py +++ b/pyadmitad/items/lost_orders.py @@ -1,4 +1,5 @@ # coding: utf-8 +from __future__ import unicode_literals from pyadmitad.items.base import Item @@ -17,13 +18,26 @@ class LostOrders(Item): SINGLE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s') def get(self, **kwargs): - kwargs['url'] = self.URL - return self.transport.get().set_pagination().request(**kwargs) + """ + Args: + limit (int) + offset (int) + + """ + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getOne(self, lost_order_id): + """ + Args: + lost_order_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'lost_order_id': Item.sanitize_id(lost_order_id) + } - def getOne(self, lost_order_id, **kwargs): - kwargs['url'] = self.SINGLE_URL - kwargs['lost_order_id'] = self.sanitize_id(lost_order_id) - return self.transport.get().request(**kwargs) + return self.transport.get().request(**request_data) class LostOrdersManager(Item): @@ -37,22 +51,37 @@ class LostOrdersManager(Item): 'advcampaign': lambda x: Item.sanitize_integer_value(x, 'advcampaign'), 'website': lambda x: Item.sanitize_integer_value(x, 'website'), 'order_id': lambda x: Item.sanitize_string_value(x, 'order_id'), - 'order_date': lambda x: Item.sanitize_string_value(x, 'order_date'), + 'order_date': lambda x: Item.sanitize_date(x, 'order_date'), 'order_price': lambda x: Item.sanitize_float_value(x, 'order_price'), 'comment': lambda x: Item.sanitize_string_value(x, 'comment'), } def delete(self, lost_order_id): - data = { + """ + Args: + lost_order_id (int) + + """ + request_data = { 'url': self.DELETE_URL, - 'lost_order_id': self.sanitize_id(lost_order_id), + 'lost_order_id': Item.sanitize_id(lost_order_id), } - return self.transport.set_method('DELETE').request(**data) - def create(self, attachment, **kwargs): - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - files = { - 'attachment': attachment, - } - return self.transport.post().set_data(data).set_files(files).request(**kwargs) + return self.transport.delete().request(**request_data) + + def create(self, attachments, **kwargs): + """ + Args: + attachments (list of str) + advcampaign (int) + website (int) + order_id (str) + order_date (date) + order_price (float) + comment (str) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + files = [('attachment', open(item, 'rb')) for item in Item.sanitize_string_array(attachments, 'attachments')] + + return self.transport.post().set_data(data).set_files(files).request(url=self.CREATE_URL) diff --git a/pyadmitad/items/me.py b/pyadmitad/items/me.py index 8175b6d..5ab521e 100644 --- a/pyadmitad/items/me.py +++ b/pyadmitad/items/me.py @@ -1,9 +1,13 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base 
import Item __all__ = ( 'Me', 'Balance', + 'PaymentsSettings', ) @@ -11,38 +15,67 @@ class Me(Item): """ Get private information - Required scope - "private_data"|"private_data_email"|"private_data_phone" """ - def __call__(self, **kwargs): - return self.get(**kwargs) + SCOPE = 'private_data private_data_email private_data_phone' URL = Item.prepare_url('me') - def get(self, **kwargs): - """ - res = client.Me.get() - res = client.Me.get(language='ru') - """ - kwargs['url'] = self.URL - return self.transport.set_method("GET").request(**kwargs) + def __call__(self): + return self.get() + + def get(self): + return self.transport.get().request(url=self.URL) class Balance(Item): """ Get balance information - Required scope - "private_data_balance" """ - def __call__(self, **kwargs): - return self.get(**kwargs) + SCOPE = 'private_data_balance' URL = Item.prepare_url('me/balance') + EXTENDED_URL = Item.prepare_url('me/balance/extended') + + def __call__(self, **kwargs): + return self.get(**kwargs) def get(self, **kwargs): """ - res = client.Balance.get() + Args: + extended (bool) + """ - kwargs['url'] = self.URL - return self.transport.set_method("GET").request(**kwargs) + url = self.EXTENDED_URL if kwargs.get('extended', False) else self.URL + + return self.transport.get().request(url=url) + + +class PaymentsSettings(Item): + """ + Get payments settings by currency + + """ + + SCOPE = 'private_data_balance' + + URL = Item.prepare_url('me/payment/settings') + CURRENCY_URL = Item.prepare_url('me/payment/settings/%(currency)s') + + def __call__(self, **kwargs): + return self.get(**kwargs) + + def get(self, currency=None): + """ + Args: + currency (str) + + """ + request_data = { + 'currency': Item.sanitize_currency_value(currency, blank=True), + 'url': self.CURRENCY_URL if currency else self.URL + } + + return self.transport.get().request(**request_data) diff --git a/pyadmitad/items/news.py b/pyadmitad/items/news.py index 8eaf46e..55eb334 100644 --- a/pyadmitad/items/news.py +++ b/pyadmitad/items/news.py @@ -1,3 +1,6 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item @@ -7,15 +10,34 @@ class News(Item): + """ + List of news + + """ + + SCOPE = 'public_data' URL = Item.prepare_url('news') SINGLE_URL = Item.prepare_url('news/%(news_id)s') def get(self, **kwargs): - kwargs['url'] = self.URL - return self.transport.get().set_pagination(**kwargs).request(**kwargs) + """ + Args: + limit (int) + offset (int) + + """ + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getOne(self, news_id): + """ + Args: + news_id (int) + + """ + data = { + 'url': self.SINGLE_URL, + 'news_id': self.sanitize_id(news_id) + } - def getOne(self, news_id, **kwargs): - kwargs['url'] = self.SINGLE_URL - kwargs['news_id'] = self.sanitize_id(news_id) - return self.transport.get().set_pagination(**kwargs).request(**kwargs) + return self.transport.get().request(**data) diff --git a/pyadmitad/items/optcodes.py b/pyadmitad/items/optcodes.py index 46b6b14..860f058 100644 --- a/pyadmitad/items/optcodes.py +++ b/pyadmitad/items/optcodes.py @@ -1,3 +1,6 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item @@ -32,21 +35,60 @@ class BaseOptCodes(Item): class OptCodes(BaseOptCodes): + SCOPE = 'opt_codes' + + ORDERING = ('action_type', 'method', 'desc_mode') + URL = Item.prepare_url('opt_codes') SINGLE_URL = Item.prepare_url('opt_codes/%(optcode_id)s') def get(self, **kwargs): - kwargs['url'] = self.URL - return 
self.transport.get().set_pagination().request(**kwargs) + """ + Args: + campaign (int) + website (int) + limit (int) + offset (int) + order_by (list of str) + + """ + ordering = { + 'order_by': kwargs.get('order_by', []), + 'available': self.ORDERING + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, optcode_id, **kwargs): - kwargs['url'] = self.SINGLE_URL - kwargs['optcode_id'] = self.sanitize_id(optcode_id) - return self.transport.get().request(**kwargs) + """ + Args: + optcode_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'optcode_id': Item.sanitize_id(optcode_id) + } + + return self.transport.get().request(**request_data) class BaseOptCodesManager(BaseOptCodes): + SCOPE = 'manage_opt_codes' + DELETE_URL = Item.prepare_url('opt_codes/delete/%(optcode_id)s') CREATE_URL = '' UPDATE_URL = '' @@ -55,22 +97,24 @@ class BaseOptCodesManager(BaseOptCodes): UPDATE_FIELDS = {} def delete(self, optcode_id): - data = { + request_data = { 'url': self.DELETE_URL, - 'optcode_id': self.sanitize_id(optcode_id), + 'optcode_id': Item.sanitize_id(optcode_id), } - return self.transport.set_method('POST').request(**data) + return self.transport.post().request(**request_data) def create(self, **kwargs): - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - return self.transport.set_method('POST').set_data(data).request(**kwargs) + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + return self.transport.post().set_data(data).request(url=self.CREATE_URL) def update(self, optcode_id, **kwargs): - data = self.sanitize_fields(self.UPDATE_FIELDS, **kwargs) - kwargs['url'] = self.UPDATE_URL - kwargs['optcode_id'] = self.sanitize_id(optcode_id) - return self.transport.set_method('POST').set_data(data).request(**kwargs) + data = Item.sanitize_fields(self.UPDATE_FIELDS, **kwargs) + request_data = { + 'url': self.UPDATE_URL, + 'optcode_id': Item.sanitize_id(optcode_id), + } + + return self.transport.post().set_data(data).request(**request_data) class OfferStatusOptCodesManager(BaseOptCodesManager): diff --git a/pyadmitad/items/payments.py b/pyadmitad/items/payments.py index 8254bef..5559ea0 100644 --- a/pyadmitad/items/payments.py +++ b/pyadmitad/items/payments.py @@ -1,83 +1,149 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item -__all__ = ( +__all__ = [ 'Payments', + 'PaymentsStatement', 'PaymentsManage', -) +] class Payments(Item): """ List of webmaster payments - Required scope - "payments" """ + + SCOPE = 'payments' + URL = Item.prepare_url('payments') - SINGLE_URL = Item.prepare_url('payments/%(id)s') + SINGLE_URL = Item.prepare_url('payments/%(payment_id)s') def get(self, **kwargs): """ - res = client.Payments.get() - res = client.Payments.get(limit=2) + Args: + has_statement (bool) + limit (int) + offset (int) """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) + filtering = { + 'filter_by': kwargs, + 'available': { + 'has_statement': lambda x: Item.sanitize_bool_integer_value(x, 'has_statement', blank=True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ +
.set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ - res = client.Payments.getOne(_id=2) - res = client.Payments.getOne(2) + Args: + _id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'payment_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) + + +class PaymentsStatement(Item): + + SCOPE = 'payments' + + URL = Item.prepare_url('payments/%(payment_id)s/statement') + + def get(self, payment_id, **kwargs): + """ + Args: + detailed (bool) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'detailed': lambda x: Item.sanitize_bool_integer_value(x, 'detailed', blank=True) + } + } + + request_data = { + 'url': self.URL, + 'payment_id': Item.sanitize_id(payment_id) + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(**request_data) class PaymentsManage(Item): """ Manage payments - Required scope - "manage_websites" """ + SCOPE = 'manage_payments' + CREATE_URL = Item.prepare_url('payments/request/%(code)s') - CONFIRM_URL = Item.prepare_url('payments/confirm/%(id)s') - DELETE_URL = Item.prepare_url('payments/delete/%(id)s') + CONFIRM_URL = Item.prepare_url('payments/confirm/%(payment_id)s') + DELETE_URL = Item.prepare_url('payments/delete/%(payment_id)s') def create(self, _code, **kwargs): """ Create a payment request. _code is a code of currency - res = client.PaymentsManage.create('USD') + Args: + _code (str) """ - kwargs['url'] = self.CREATE_URL - kwargs['code'] = self.sanitize_currency(_code) - return self.transport.set_method('POST').request(**kwargs) + request_data = { + 'url': self.CREATE_URL, + 'code': Item.sanitize_currency_value(_code) + } + + return self.transport.post().request(**request_data) def confirm(self, _id, **kwargs): """ Confirm a payment request. _id is a payment id. - res = client.PaymentsManage.confirm(71) + Args: + _id (int) """ - kwargs['url'] = self.CONFIRM_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('POST').request(**kwargs) + request_data = { + 'url': self.CONFIRM_URL, + 'payment_id': Item.sanitize_id(_id) + } + + return self.transport.post().request(**request_data) def delete(self, _id, **kwargs): """ Delete a payment request. _id is a payment id. 
- res = client.PaymentsManage.delete(71) + Args: + _id (int) """ - kwargs['url'] = self.DELETE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('POST').request(**kwargs) + request_data = { + 'url': self.DELETE_URL, + 'payment_id': Item.sanitize_id(_id) + } + + return self.transport.post().request(**request_data) diff --git a/pyadmitad/items/referrals.py b/pyadmitad/items/referrals.py index a071fb1..71d9b2a 100644 --- a/pyadmitad/items/referrals.py +++ b/pyadmitad/items/referrals.py @@ -1,41 +1,56 @@ +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item -__all__ = ( +__all__ = [ 'Referrals', -) +] class Referrals(Item): """ List of referrals - Required scope - "referrals" """ - URL = Item.prepare_url('referrals') - SINGLE_URL = Item.prepare_url('referrals/%(id)s') + SCOPE = 'referrals' - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date - } + URL = Item.prepare_url('referrals') + SINGLE_URL = Item.prepare_url('referrals/%(referral_id)s') def get(self, **kwargs): """ - res = client.Referrals.get() - res = client.Referrals.get(limit=2) + Args: + date_start (date) + date_end (date) + limit (int) + offset (int) + """ - kwargs['url'] = self.URL - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).request(**kwargs) + filtering = { + 'filter_by': kwargs, + 'available': { + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ - res = client.Referrals.getOne(_id=2) - res = client.Referrals.getOne(2) + Args: + _id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + request_data = { + 'url': self.SINGLE_URL, + 'referral_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) diff --git a/pyadmitad/items/retag.py b/pyadmitad/items/retag.py new file mode 100644 index 0000000..1ff5b19 --- /dev/null +++ b/pyadmitad/items/retag.py @@ -0,0 +1,150 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from pyadmitad.items.base import Item + + +__all__ = [ + 'Retag', + 'RetagManager' +] + + +class Retag(Item): + SCOPE = 'webmaster_retag' + + URL = Item.prepare_url('retag') + SINGLE_URL = Item.prepare_url('retag/%(retag_id)s') + LEVELS_FOR_WEBSITE_URL = Item.prepare_url('retag/website/%(website_id)s/levels') + LEVELS_FOR_CAMPAIGN_URL = Item.prepare_url('retag/advcampaign/%(campaign_id)s/levels') + + def get(self, **kwargs): + """ + Args: + website (int) + active (bool) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', True), + 'active': lambda x: Item.sanitize_bool_integer_value(x, 'active', True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, retag_id): + """ + Args: + retag_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'retag_id': Item.sanitize_id(retag_id) + } + + return self.transport.get().request(**request_data) + + def getLevelsForWebsite(self, website_id): + """ + Args: + website_id (int) + + """ + request_data = { + 'url': 
self.LEVELS_FOR_WEBSITE_URL, + 'website_id': Item.sanitize_id(website_id) + } + + return self.transport.get().request(**request_data) + + def getLevelsForCampaign(self, campaign_id): + """ + Args: + campaign_id (int) + + """ + request_data = { + 'url': self.LEVELS_FOR_CAMPAIGN_URL, + 'campaign_id': Item.sanitize_id(campaign_id) + } + + return self.transport.get().request(**request_data) + + +class RetagManager(Item): + + SCOPE = 'manage_webmaster_retag' + + CREATE_URL = Item.prepare_url('retag/create') + UPDATE_URL = Item.prepare_url('retag/update/%(retag_id)s') + DELETE_URL = Item.prepare_url('retag/delete/%(retag_id)s') + + CREATE_FIELDS = { + 'website': lambda x: Item.sanitize_integer_value(x, 'website'), + 'level': lambda x: Item.sanitize_integer_value(x, 'level'), + 'active': lambda x: Item.sanitize_bool_integer_value(x, 'active', blank=True), + 'script': lambda x: Item.sanitize_string_value(x, 'script'), + 'comment': lambda x: Item.sanitize_string_value(x, 'comment', blank=True), + } + + UPDATE_FIELDS = { + 'level': lambda x: Item.sanitize_integer_value(x, 'level', blank=True), + 'active': lambda x: Item.sanitize_bool_integer_value(x, 'active', blank=True), + 'script': lambda x: Item.sanitize_string_value(x, 'script', blank=True), + 'comment': lambda x: Item.sanitize_string_value(x, 'comment', blank=True), + } + + def create(self, **kwargs): + """ + Args: + website (int) + level (int) + active (bool) + script (str) + comment (str) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(url=self.CREATE_URL) + + def update(self, retag_id, **kwargs): + """ + Args: + retag_id (int) + level (int) + active (bool) + script (str) + comment (str) + + """ + request_data = { + 'url': self.UPDATE_URL, + 'retag_id': Item.sanitize_id(retag_id) + } + + data = Item.sanitize_fields(self.UPDATE_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(**request_data) + + def delete(self, retag_id): + """ + Args: + retag_id (int) + + """ + request_data = { + 'url': self.DELETE_URL, + 'retag_id': Item.sanitize_id(retag_id) + } + + return self.transport.post().request(**request_data) diff --git a/pyadmitad/items/statistics.py b/pyadmitad/items/statistics.py index 5e3db09..9e47053 100644 --- a/pyadmitad/items/statistics.py +++ b/pyadmitad/items/statistics.py @@ -1,9 +1,13 @@ +# coding: utf-8 +from __future__ import unicode_literals + from copy import copy -from pyadmitad.constants import SUB_ID_MAX_LENGTH + +from pyadmitad.constants import MAX_SUB_ID_LENGTH from pyadmitad.items.base import Item -__all__ = ( +__all__ = [ 'StatisticWebsites', 'StatisticCampaigns', 'StatisticDays', @@ -12,14 +16,14 @@ 'StatisticSubIds', 'StatisticSources', 'StatisticKeywords', -) +] class StatisticBase(Item): STATUSES = (1, 2, 3) SOURCES = ('g', 'y') - ACTION_TYPES = ('lead', 'Lead') + ACTION_TYPES = ('lead', 'sale') ORDERING = ( 'action', @@ -40,7 +44,7 @@ class StatisticBase(Item): @staticmethod def check_sub_id(sub_id): - return u'%s' % sub_id if len(sub_id) <= SUB_ID_MAX_LENGTH else None + return sub_id if len(sub_id) <= MAX_SUB_ID_LENGTH else None @staticmethod def check_sources(source): @@ -52,42 +56,60 @@ def check_status(status): @staticmethod def check_actions_type(action_type): - return action_type if action_type\ - in StatisticBase.ACTION_TYPES else None, + return action_type if action_type in StatisticBase.ACTION_TYPES else None, FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, -
'campaign': int, - 'subid': check_sub_id + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'total': lambda x: Item.sanitize_integer_value(x, 'total', blank=True), + 'subid': lambda x: StatisticBase.check_sub_id(x) } def get(self, url, **kwargs): """Base GET method""" kwargs['url'] = url - kwargs['allowed_filtering'] = self.FILTERING - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).set_ordering(**kwargs).request(**kwargs) + + ordering = { + 'order_by': kwargs.get('order_by', []), + 'available': self.ORDERING + } + + filtering = { + 'filter_by': kwargs, + 'available': self.FILTERING, + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(**kwargs) class StatisticWebsites(StatisticBase): """ Statistics by websites - Required scope - "statistics" """ + SCOPE = 'statistics' + URL = Item.prepare_url('statistics/websites') def get(self, **kwargs): """ - res = client.StatisticWebsites.get() - res = client.StatisticWebsites.get(website=1, campaign=1) - res = client.StatisticWebsites.get(subid="ADS778") - res = client.StatisticWebsites.get(limit=2) - res = client.StatisticWebsites.get(date_start='01.01.2013') + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (list of str) """ return super(StatisticWebsites, self).get(self.URL, **kwargs) @@ -97,18 +119,24 @@ class StatisticCampaigns(StatisticBase): """ Statistics by campaigns - Required scope - "statistics" """ + SCOPE = 'statistics' + URL = Item.prepare_url('statistics/campaigns') def get(self, **kwargs): """ - res = client.StatisticCampaigns.get() - res = client.StatisticCampaigns.get(website=1, campaign=1) - res = client.StatisticCampaigns.get(sub_id="ADS778") - res = client.StatisticCampaigns.get(limit=2) - res = client.StatisticCampaigns.get(date_start='01.01.2013') + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (str) """ return super(StatisticCampaigns, self).get(self.URL, **kwargs) @@ -118,18 +146,25 @@ class StatisticDays(StatisticBase): """ Statistics by days - Required scope - "statistics" """ + SCOPE = 'statistics' + URL = Item.prepare_url('statistics/dates') def get(self, **kwargs): """ - res = client.StatisticDays.get() - res = client.StatisticDays.get(website=1, campaign=1) - res = client.StatisticDays.get(sub_id="ADS778") - res = client.StatisticDays.get(limit=2) - res = client.StatisticDays.get(date_start='01.01.2013') + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (str) + """ return super(StatisticDays, self).get(self.URL, **kwargs) @@ -138,18 +173,24 @@ class StatisticMonths(StatisticBase): """ Statistics by months - Required scope - "statistics" """ + SCOPE = 'statistics' + URL = Item.prepare_url('statistics/months') def get(self, **kwargs): """ - res = client.StatisticMonths.get() - res = client.StatisticMonths.get(website=1, campaign=1) - res = client.StatisticMonths.get(sub_id="ADS778") - res = 
client.StatisticMonths.get(limit=2) - res = client.StatisticMonths.get(date_start='01.01.2013') + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (str) """ return super(StatisticMonths, self).get(self.URL, **kwargs) @@ -159,9 +200,10 @@ class StatisticActions(StatisticBase): """ Statistics by actions - Required scope - "statistics" """ + SCOPE = 'statistics' + ORDERING = ( 'action', 'banner', @@ -182,36 +224,54 @@ class StatisticActions(StatisticBase): ) FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'closing_date_start': Item.check_date, - 'closing_date_end': Item.check_date, - 'status_updated_start': Item.check_long_date, - 'status_updated_end': Item.check_long_date, - 'website': int, - 'campaign': int, - 'subid': StatisticBase.check_sub_id, - 'subid1': StatisticBase.check_sub_id, - 'subid2': StatisticBase.check_sub_id, - 'subid3': StatisticBase.check_sub_id, - 'subid4': StatisticBase.check_sub_id, - 'source': StatisticBase.check_sources, - 'status': StatisticBase.check_status, - 'keyword': Item.to_unicode, - 'action': Item.to_unicode, - 'action_type': StatisticBase.check_actions_type, - 'action_id': int + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'closing_date_start': lambda x: Item.sanitize_date(x, 'closing_date_start', blank=True), + 'closing_date_end': lambda x: Item.sanitize_date(x, 'closing_date_end', blank=True), + 'status_updated_start': lambda x: Item.sanitize_long_date(x, 'status_updated_start', blank=True), + 'status_updated_end': lambda x: Item.sanitize_long_date(x, 'status_updated_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'subid': lambda x: StatisticBase.check_sub_id(x), + 'subid1': lambda x: StatisticBase.check_sub_id(x), + 'subid2': lambda x: StatisticBase.check_sub_id(x), + 'subid3': lambda x: StatisticBase.check_sub_id(x), + 'subid4': lambda x: StatisticBase.check_sub_id(x), + 'source': lambda x: StatisticBase.check_sources(x), + 'status': lambda x: StatisticBase.check_status(x), + 'keyword': lambda x: Item.sanitize_string_value(x, 'keyword', blank=True), + 'action': lambda x: Item.sanitize_string_value(x, 'action', blank=True), + 'action_type': lambda x: StatisticBase.check_actions_type(x), + 'action_id': lambda x: Item.sanitize_integer_value(x, 'action_id', blank=True), } URL = Item.prepare_url('statistics/actions') def get(self, **kwargs): """ - res = client.StatisticActions.get() - res = client.StatisticActions.get(website=1, campaign=1) - res = client.StatisticActions.get(subid="ADS778") - res = client.StatisticActions.get(limit=2) - res = client.StatisticActions.get(date_start='01.01.2013') + Args: + date_start (date) + date_end (date) + closing_date_start (date) + closing_date_end (date) + status_updated_start (date) + status_updated_end (date) + website (int) + campaign (int) + subid (str) + subid1 (str) + subid2 (str) + subid3 (str) + subid4 (str) + source (str) + status (int) + keyword (str) + action (str) + action_type (str) + action_id (int) + limit (int) + offset (int) + order_by (list of int) """ return super(StatisticActions, self).get(self.URL, **kwargs) @@ -221,8 +281,10 @@ class StatisticSubIds(StatisticBase): """ Statistics by sub-ids - Required scope - "statistics" """ + + SCOPE = 
'statistics' + SUB_ID_NUMBERS = range(0, 5) ORDERING = ( @@ -239,18 +301,17 @@ class StatisticSubIds(StatisticBase): ) FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), } - URL = Item.prepare_url('statistics/sub_ids%s') + URL = Item.prepare_url('statistics/sub_ids%(subid_number)s') def sanitize_sub_id_number(self, number): if number not in self.SUB_ID_NUMBERS: - raise ValueError("Invalid subid number. '%s': %s" % ( - number, self.SUB_ID_NUMBERS)) + raise ValueError("Invalid subid number. '%s': %s" % (number, self.SUB_ID_NUMBERS)) def prepare_filtering(self, sub_id_number): params = copy(self.FILTERING) @@ -279,20 +340,35 @@ def get(self, sub_id_number=0, **kwargs): """ self.sanitize_sub_id_number(sub_id_number) - kwargs['url'] = self.URL % (sub_id_number or '') - kwargs['allowed_filtering'] = self.prepare_filtering(sub_id_number) - kwargs['allowed_ordering'] = self.prepare_ordering(sub_id_number) - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).set_ordering(**kwargs).request(**kwargs) + kwargs['url'] = self.URL % { + 'subid_number': sub_id_number or '' + } + + ordering = { + 'order_by': kwargs.get('order_by', []), + 'available': self.prepare_ordering(sub_id_number) + } + + filtering = { + 'filter_by': kwargs, + 'available': self.prepare_filtering(sub_id_number) + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(**kwargs) class StatisticSources(StatisticBase): """ Statistics by sources - Required scope - "statistics" """ + SCOPE = 'statistics' + ORDERING = ( 'actions', 'clicks', @@ -308,19 +384,24 @@ class StatisticSources(StatisticBase): ) FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), } URL = Item.prepare_url('statistics/sources') def get(self, **kwargs): """ - res = client.StatisticSources.get() - res = client.StatisticSources.get(date_start='01.01.2013') - res = client.StatisticSources.get(limit=2) + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + limit (int) + offset (int) + order_by (list of int) """ return super(StatisticSources, self).get(self.URL, **kwargs) @@ -330,9 +411,10 @@ class StatisticKeywords(StatisticBase): """ Statistics by keywords - Required scope - "statistics" """ + SCOPE = 'statistics' + ORDERING = ( 'actions', 'clicks', @@ -349,21 +431,26 @@ class StatisticKeywords(StatisticBase): ) FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, - 'source': ( - lambda x: x if x in StatisticBase.SOURCES else None), + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', 
blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'source': StatisticBase.check_sources, } URL = Item.prepare_url('statistics/keywords') def get(self, **kwargs): """ - res = client.StatisticKeywords.get() - res = client.StatisticKeywords.get(date_start='01.01.2013') - res = client.StatisticKeywords.get(limit=2) + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + source (str) + limit (int) + offset (int) + order_by (list of str) """ return super(StatisticKeywords, self).get(self.URL, **kwargs) diff --git a/pyadmitad/items/tickets.py b/pyadmitad/items/tickets.py new file mode 100644 index 0000000..7738418 --- /dev/null +++ b/pyadmitad/items/tickets.py @@ -0,0 +1,104 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from pyadmitad.items.base import Item + + +__all__ = [ + 'Tickets', + 'TicketsManager' +] + + +class Tickets(Item): + SCOPE = 'tickets' + + URL = Item.prepare_url('tickets') + SINGLE_URL = Item.prepare_url('tickets/%(ticket_id)s') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + status (int) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', True), + 'status': lambda x: Item.sanitize_integer_value(x, 'status', True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, ticket_id): + """ + Args: + ticket_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'ticket_id': Item.sanitize_id(ticket_id) + } + + return self.transport.get().request(**request_data) + + +class TicketsManager(Item): + + SCOPE = 'manage_tickets' + + CREATE_URL = Item.prepare_url('tickets/create') + COMMENT_URL = Item.prepare_url('tickets/%(ticket_id)s/create') + + CREATE_FIELDS = { + 'subject': lambda x: Item.sanitize_string_value(x, 'subject'), + 'text': lambda x: Item.sanitize_string_value(x, 'text'), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign'), + 'category': lambda x: Item.sanitize_integer_value(x, 'category'), + 'priority': lambda x: Item.sanitize_integer_value(x, 'priority'), + } + + COMMENT_FIELDS = { + 'text': lambda x: Item.sanitize_string_value(x, 'text'), + } + + def create(self, **kwargs): + """ + Args: + subject (str) + text (str) + campaign (int) + category (int) + priority (int) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(url=self.CREATE_URL) + + def comment(self, ticket_id, **kwargs): + """ + Args: + ticket_id (int) + text (str) + + """ + request_data = { + 'url': self.COMMENT_URL, + 'ticket_id': Item.sanitize_id(ticket_id) + } + + data = Item.sanitize_fields(self.COMMENT_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(**request_data) diff --git a/pyadmitad/items/websites.py b/pyadmitad/items/websites.py index e23e1d8..c675d7e 100644 --- a/pyadmitad/items/websites.py +++ b/pyadmitad/items/websites.py @@ -1,4 +1,6 @@ -from copy import deepcopy +# coding: utf-8 +from __future__ import unicode_literals + from pyadmitad.items.base import Item @@ -12,54 +14,87 @@ class Websites(Item): """ List of websites - Required scope - "websites" """ + + SCOPE = 'websites' + URL = Item.prepare_url('websites') - SINGLE_URL = Item.prepare_url('websites/%(id)s') - - STATUS_FILTERING = ('new', 
'pending', 'active', 'suspended', 'declined') - CAMPAIGN_STATUS_FILTERING = ('pending', 'active', 'declined', 'disabled') - FILTERING = { - 'status': lambda x: x if x in Websites.STATUS_FILTERING else None, - 'campaign_status': ( - lambda x: x if x in Websites.CAMPAIGN_STATUS_FILTERING else None), - } + SINGLE_URL = Item.prepare_url('websites/%(website_id)s') + + STATUS_NEW = 'new' + STATUS_PENDING = 'pending' + STATUS_ACTIVE = 'active' + STATUS_SUSPENDED = 'suspended' + STATUS_DECLINED = 'declined' + + CAMPAIGN_STATUS_PENDING = 'pending' + CAMPAIGN_STATUS_ACTIVE = 'active' + CAMPAIGN_STATUS_DECLINED = 'declined' + CAMPAIGN_STATUS_DISABLED = 'disabled' + + STATUS_LIST = [ + STATUS_NEW, STATUS_PENDING, STATUS_ACTIVE, + STATUS_SUSPENDED, STATUS_DECLINED + ] + CAMPAIGN_STATUS_LIST = [ + CAMPAIGN_STATUS_PENDING, CAMPAIGN_STATUS_ACTIVE, + CAMPAIGN_STATUS_DECLINED, CAMPAIGN_STATUS_DISABLED + ] def get(self, **kwargs): """ - res = client.Websites.get() - res = client.Websites.get(status='new', campaign_status='active') + Args: + status (str) + campaign_status (str) + limit (int) + offset (int) """ - kwargs['url'] = self.URL - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).request(**kwargs) + filtering = { + 'filter_by': kwargs, + 'available': { + 'status': lambda x: x if x in self.STATUS_LIST else None, + 'campaign_status': lambda x: x if x in self.CAMPAIGN_STATUS_LIST else None + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ - res = client.Websites.getOne(_id=2) - res = client.Websites.getOne2(2) + Args: + _id (int) + """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) + requests_data = { + 'url': self.SINGLE_URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**requests_data) class WebsitesManage(Item): """ Manage websites - Required scope - "manage_websites" """ + + SCOPE = 'manage_websites' + CREATE_URL = Item.prepare_url('website/create') - UPDATE_URL = Item.prepare_url('website/update/%(id)s') - VERIFY_URL = Item.prepare_url('website/verify/%(id)s') - DELETE_URL = Item.prepare_url('website/delete/%(id)s') + UPDATE_URL = Item.prepare_url('website/update/%(website_id)s') + VERIFY_URL = Item.prepare_url('website/verify/%(website_id)s') + DELETE_URL = Item.prepare_url('website/delete/%(website_id)s') CREATE_FIELDS = { - 'name': lambda x: Item.sanitize_string_value(x, 'name', max_length=200), - 'kind': lambda x: Item.sanitize_string_value(x, 'kind', max_length=20), + 'name': lambda x: Item.sanitize_string_value( + x, 'name', max_length=200), + 'kind': lambda x: Item.sanitize_string_value( + x, 'kind', max_length=20), 'language': lambda x: Item.sanitize_string_value( x, 'language', max_length=2), 'adservice': lambda x: Item.sanitize_integer_value( @@ -68,13 +103,16 @@ class WebsitesManage(Item): x, 'site_url', max_length=255), 'description': lambda x: Item.sanitize_string_value( x, 'description', max_length=20000, min_length=100), - 'categories': lambda x: Item.sanitize_integer_array(x, 'categories'), + 'categories': lambda x: Item.sanitize_integer_array( + x, 'categories'), 'regions': lambda x: Item.sanitize_string_array( x, 'regions', max_length=2), 'atnd_visits': lambda x: Item.sanitize_integer_value( x, 'atnd_visits', blank=False), 'atnd_hits': lambda x: 
Item.sanitize_integer_value( - x, 'atnd_hits', blank=False) + x, 'atnd_hits', blank=False), + 'mailing_targeting': lambda x: Item.sanitize_bool_integer_value( + x, 'mailing_targeting', blank=True) } UPDATE_FIELDS = { @@ -95,49 +133,78 @@ class WebsitesManage(Item): 'atnd_visits': lambda x: Item.sanitize_integer_value( x, 'atnd_visits', blank=True), 'atnd_hits': lambda x: Item.sanitize_integer_value( - x, 'atnd_hits', blank=True) + x, 'atnd_hits', blank=True), + 'mailing_targeting': lambda x: Item.sanitize_bool_integer_value( + x, 'mailing_targeting', blank=True) } def create(self, **kwargs): """ - res = client.WebsitesManage.create(name='test', ....) + Args: + name (str) + kind (str) + language (str) + adservice (int) + site_url (str) + description (str) + categories (list of int) + regions (list of str) + atnd_visits (int) + atnd_hits (int) + mailing_targeting (bool) """ - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - kwargs.pop('language', None) - return self.transport.set_method('POST').set_data(data).request(**kwargs) + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(url=self.CREATE_URL) def update(self, _id, **kwargs): """ - Here _id is a website id. - - res = client.WebsitesManage.update(22, name='test', ....) + Args: + _id (int) + name (str) + language (str) + adservice (int) + site_url (str) + description (str) + categories (list of int) + regions (list of str) + atnd_visits (int) + atnd_hits (int) + mailing_targeting (bool) """ - data = self.sanitize_fields(self.UPDATE_FIELDS, **kwargs) - kwargs['url'] = self.UPDATE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs.pop('language', None) - return self.transport.set_method('POST').set_data(data).request(**kwargs) + data = Item.sanitize_fields(self.UPDATE_FIELDS, **kwargs) + + request_data = { + 'url': self.UPDATE_URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.post().set_data(data).request(**request_data) def verify(self, _id): """ - Here _id is a website id. - - res = client.WebsitesManage.verify(40) + Args: + _id (int) """ + request_data = { + 'url': self.VERIFY_URL, + 'website_id': Item.sanitize_id(_id) + } - data = {'url': self.VERIFY_URL, 'id': self.sanitize_id(_id)} - return self.transport.set_method('POST').request(**data) + return self.transport.post().request(**request_data) def delete(self, _id): """ - Here _id is a website id. 
- - res = client.WebsitesManage.delete(40) + Args: + _id (int) """ - data = {'url': self.DELETE_URL, 'id': self.sanitize_id(_id)} - return self.transport.set_method('POST').request(**data) + request_data = { + 'url': self.DELETE_URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.post().request(**request_data) diff --git a/pyadmitad/tests/base.py b/pyadmitad/tests/base.py index 431d25d..99bd1d3 100644 --- a/pyadmitad/tests/base.py +++ b/pyadmitad/tests/base.py @@ -1,41 +1,20 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals -from mocker import MockerTestCase -from pyadmitad.api import get_oauth_client -from pyadmitad.transport import build_headers, \ - HttpTransportPagination, HttpTransportOrdering, HttpTransportFiltering +from future import standard_library +standard_library.install_aliases() +from unittest import TestCase +from urllib.parse import urlencode -class BaseTestCase(MockerTestCase): +from pyadmitad.api import get_oauth_client_token - def prepare_data(self, **kwargs): - with_pagination = kwargs.pop('with_pagination', True) - with_ordering = kwargs.pop('with_ordering', True) - with_filtering = kwargs.pop('with_filtering', True) - data = kwargs.get('data', {}) or {} - if with_pagination: - data.update(HttpTransportPagination(**kwargs).to_value()) - if with_ordering: - data.update(HttpTransportOrdering(**kwargs).to_value()) - if with_filtering: - data.update(HttpTransportFiltering(**kwargs).to_value()) - return data or None + +class BaseTestCase(TestCase): + client = get_oauth_client_token(access_token='') @staticmethod - def prepare_method(**kwargs): - method = kwargs.get('method', 'GET') - return method if method in ('POST', 'GET') else 'GET' + def prepare_url(url, params=None, **kwargs): + base = url % kwargs - def set_mocker(self, url, **kwargs): - access_token = 'access_token' - self.client = get_oauth_client(access_token) - obj = self.mocker.patch(self.client.transport) - url = url % kwargs - kwargs = { - 'data': self.prepare_data(**kwargs), - 'headers': build_headers(access_token), - 'method': BaseTestCase.prepare_method(**kwargs), - 'debug': False, - 'files': None, - } - obj.api_request(url, **kwargs) + return base if not params else '%s?%s' % (base, urlencode(params, doseq=True)) diff --git a/pyadmitad/tests/data/image.png b/pyadmitad/tests/data/image.png new file mode 100644 index 0000000000000000000000000000000000000000..b96adc699a3b60ec2a350d5c0c45140b6e3324e0 GIT binary patch literal 7605 zcmV;m9ZKSfP)etkU(}f@R5)LAtB{klCUJ3Y{-&qcInlU zZER!91@~^;+o<<$*_JI?y^VTrqb|A228;^^46fL?;oh*%p7Zn`ejdGfWi+E1N$3B4 z|8HF8wfpY<&8_E}OdcqU#d7=BxQ0i!#J0S!HKt|G*0|<1TVk7Aw#K$>-5T4x7rh^S zO6E_{yT$*nMaSVWlRD#D{;@5-{UH;J#bWigK8maQ_Lk_Te{P9s3fU6V(!M38`2rf8 z=(7mMdW7Ws&e+B$!3^KvRA;eRP8R^&wkfLl>CLgtYY{xBHY0#~(fjebK^vl5e#LKs z#bWhX8{?XOv^lETcT;rpC!3<1zyrK_Q%uv;_R#iw`R%Y+EMq{6u>N{ubdzOcRP*(X zQB9!RkxkGV+5n9~^-%9$2ep2+P_?`o%9mC{+0rWftb$7Xd$qYn_`fFnUPpK%7`jH6 z&*8nrwujbnpB#(D(g0d)%VP+Q4D>ZU5Niu-fQEoNgjEfcEUtw71?6CwR}Oh|%OLli zGRT=z2HCSqAq$;3s{}G;mO%Q9Vn~}_45`zKA!TY2tV5?xFA~02ys#4L{A-~jLO1Nr zVl6JDEx7G|etRqy>lRVdZbM{aEczN6?0J1e12hNMLycbzlr5=(l0}sQjEWXiKp{GR zzGGlZo>B-&lM7+(q(WFTu>ca$>G+*uJbz1w71ZvDyfL!z^u~y$88kx6ZF#c!3x8mK^vT7OqZM8jHY_SthY)|Tr0>ad@z1}c|TL&dV*2U`MyZN)eX#E-Q= z0)Emt>AqC%`$~I6<18A<<+sdY4Is5eHa^iF-uNLJl-v^1AawrM_|^y={hk80xG{MU zJ30^I@lyhvy=Oo-G=()i$Zwm)8UX4Dw~lBJZ#YNII$O6Q&>8~kg${fgwe(c5#rWhx zG&&alUNFA`+C#fPi|EhV!W;j@Z=J>Jr=q4=Tc|abMlzM$9BdUvIH{)ASHKoEDhDD* z=D?as1=wV+1v}4nr8V3-lyd-!)z8#qZn^_U7t?5tnXPHmu?a^ysitP_qhO18i<}LX 
zx#dEwQ{g#pw1rq_b53Bf`iXX$E#GPhwdNAQluS^g3AKG4Y~draA#8Xy*4_%S=2@@w ztT%9UcOmBp7OQ{M)XJ2A){1WKhhPgGo`wIP2erPniXg{lUdB0t#p=U-YS?dPHi-pLCz{G!ebO#$5A#&NVES2dm0l_1%WR zx{uHxH$jcoQVvwG1q{v*{!I66n>ILtTF%R_SS$~gHK0D3;3PLqQtgV&F|fhr@0|gm zBeH~NvP~mvVC{I$JuH?dLe0PSC4!RN1TBhN#$f9Lo3D4eundb9_S>Y@#cFf)PdOK{ zSRN#tuXbl$KXu2Ad7o%wFk`GOY;YX(iaNwuo~Qi{(MW$*5)mkz}fo ztdHp0CxF4$9c*T=G#n|d0C}2;&-)_hCPVrqdII`yoD?h_`Z;7G>D$Y$-~mf zFK1-=WxnRoI9(xy03?~BTx>?5G1%0>w(N~Gp-C!FJHdW66FE;Rs#=yYZQWnNwgA0d zW*^Q5x>cx>U(~@?>tFjb1kY81k7Nq6NE;6`*tEe$f2U`Wr(c!1W_>og^VDAz5H8n( z?=sj7fURm-%~FDmWa|9xM5wtFY{4T|!Hx|dg1dC_{3Y12@k7XoDTleQhhnWR2K)!7 zs)B9FYpFu5O`ewGvp&l#8}MNJA^HiK|4qj+kD^1+N5r)GF@sGf*uVt0RV=Ok2n~|k zu(sA2=RUAS_$0!{y6xa8x_adrbX0Xhz_8Un_jS0EUx$q0Y)-GXB$GzU0{nFyAG`R9pH(&e*HQ$6>WfduSLRguM|bF zEq*l>2QLaho~GyvrSnSf<#NhkW3crS*h&{yts=NcCMa!-cMsTByqyZAN%i3AI`qls zuzW-;EPW$c5o|$kW`aCT1ux}61O^*}tyjQCo!SVtFVG;F;N=dmB~QtL!c{fUS9JE& zIS3rL3Za(T4Qz{EO@Ta|q9RXMe8vhcrwld*TQ7jEd{Oxy2r7~*a1?Lbik4mnTgFTa zto6+kntDCa=N})3!+Vaw;oZmJkmOxmJ}CGo96%r5dmN5^b_$MtdJ2voI1N`WUUlet zM-Lo_MT6oL!A5iD3;hW=3FS~t+SrmpEO#=vIrw%m?UDb7%2 z3NjVEP6S$Sfvsp!IfPC~6#A<4>e7YFy#_W~c|eQXH&^Y}ysvlLcOlf)bpu<{lmd{_ zS9GHjfp)u?iI4TLAhbDH6S$8m{RXdf9{qn#2Adc93kRNirtG`~#?=N3I{AuNL)N1p zn`0;SGT5j#JJob^PMo9OjDEy{{Bo~Y12l52OX!ykfekk$`9+~P8J1kJtP0*uXzdlS zl_7wF$E*})|LfIf+YiC|(#~E5o3x~=vrc`Y@b#~*ga63xV54)&(-{4wlc4r{g=R`m zd`MQC(xbc49S&aWGWvv*z-F(eNYNj5i%pzeVvSL$7Wp@^miDnq?d}8dJp1f?hu-PK z9u78o#*I$QFZ4t53j_J(PO*mQ$hEF&{BYe0wu1TPFUt#EJp!mVz(xQK@kzib6G!z= zuh{uH0bAI}Z201!I)n=ntlhxokNs>mV8h9~r4Kj(Z1GOaq>{IZwLm@n zSk*4Lz9V3h`<$3Ae^KZU`d75SLXB*PLZLY+UPq}P@)5C*&Gy#DXgMkj*i{ zHvg~WrxeZ`>Vo&QOY8>GqaqjPms`P>Kfml9gd0evoy~g#Y@wr8!AI|ZrhnuSM>_Md znM~0=+yypT1YfqYUG>`ATX%H>8~s_H#^l2JmCgvHc)t^xph_O2|XoS7hfXSQf>j;f|u6`p_b(7oIkgG zoRgZPK6W~^1JVD5eo$6(`K8$Ldsrire57&Bhjn(WQ**P4x^l(O(=GzrDfIIO)2%__ zng>l?>o<)wt!&fs4G#vJNgOb+eQp}*4H28<4~n&M(qrOisMcW2VU5w!2gG-zr?0X_ z%r6aMt=TcKS>~2E(5Ri{tuZYw1zX(PNf<$6n{KF`K5_<1FeZ<8xY_- zVt?aV<6xUtwvC`6`CSB>tH8E$dc(*9~kG z?L(f<7-KHci6f&=h?(^V299}D?2xsWQS24CyEHoeLF~&>({IrSoCr1_2h!{Tg}ymQ z^t{b^%KHG=9v3_DA2M(Z)zUAjystSb(@L~j2#GrGcb_4!;gX+^XxR@LYZ35XLYqs# zmNX>;DpDJDL+#wj^H3FJRg9qPZm>1xZC1VhirIzTz!pC?52SP!Jhv>{jt)%(YYX{ zv*0;pm|Dm3GC3a!mt+};$mY*KHE&MNK(HaeG-&Vpx^Zgeuxs1x{8gG1!k zRQ|8j&UMuK<&4%s-3_)aHs^WN!Smd(86os^u-zuslst`sBTbl`hCHN?`kz#xd($1~#lY9Ku3I1EZm4ZY-K=~2J$Hi5e{hCjc+FsQFWBxhDbL??71$osI40a>V9T6YvIh-h>ig*_*sK`xD0J*9IDYt~ zzTr>odfQ(3o}qn3-CAQCwNACa@WheR-BunD*rG?}f|Smp)13ge1e5D#aw~!D-%a`# zlrz{o4{YHs%rEs0{1BR51~%MqcnfVhEV(COYYwi1uraIP;K!fqp4QQtvk?-;Q$WTP z3<-`Nmjdx<+Vg-MJ0=-o(9xs29OaV?O_mLA0UK?2RgOt_8Q9XM7ssGM@`lJJhrkw(OQqiH*bSZ_XM-(dcqS}(B^owa z-&MWtHH`ingvTj?jqX97&Iq<)Jp|hhm6uGhw(in#4x?ZrsjU%R&tUUdu&FJfqm?9j zc26@3Hqudz7KLlTmO8C?lDxn*1sjjBUd>5NvA8Th{74ha+IC)0htM0|uK%g6)JxZO!B&uoausBP##M zHDH@w{67dckW3+kjDRhDMkdtdwt3XfQI3Kw$|nb#oT=b9cn!>dB?_iL=MO1C1-b@8 zD9B{ov=T+I(Z9*l7=0tTXZwq4O}p@QA=tDQG#eQ}XlJnL0^5cj-siV%?$Op^E(F^< z8u?|Bf#=X4L3iEcl7~|;@`Pj>k<<&eym=NVO04y`E%AciUJdWO7$S49;5jec>e-@a zJ{JhnpY=zO`GN1~I9OxOf%eL+aOR}`aOLaEb}H^tMPQ>1UF2zu{=A2^weM+UmN=c~ z32*>xf6=%$g{RT8G|9*0-bL_z32e1C$NbLV{e4RVYiyQ#k6B_TCB6sMC+B`(# z8M<%gvTkb8AvBPwV|#mo_MQiY3ko1TtXTI_sooUDM5A!yMoh;zy6xWq-0IH{;bjkO zkr;VbN?&sF)Iw*>VN+|0(grPBy9qsRQV$1Hs&ef68ku#wscR0CJx$RVVPmk}DvQlA zuQ_m!YO&2eHfaYgc*LY0Q?1@WjZ-PVsx?W+n!5Hq07GC)np~8TJOvA!=nYZoyHr(~ z%OE8%-y@;c+d8uEBm`rhoYGG464(+jWRX055p2%bGjqGfi=M7>?EjjyH}g;f+XR#L zyrvW4G=X1=qiqj~zq+j^eee`p#o8k{6MUv4wp;!TIQwqjVpOoY}3}HYII@ zjY%6|!`cn7e$9I5NbGX^>JDf_x321dmX+v)c4$U7t!RfvoHo&bw#KzWeQYb#;XTx1 z5X{StY@2r&vuGZxXSD-d! 
z3j_?w1YhrTRj|=_uy!N|TSvGRA|@unmnY6@7W^*{e+g}P zypf&Y`vTZ5VMwKpq8$*8O-mXO@b}Kp1Y3+x9!TjbcyeK;$$9d3BiKx0bN2>=&E;St z71^ng(_oG?5&IsoxlhebrNEC1-3T_)IGwdWCxUHa;qo<;3P5tLU(MHGquSc)>8UvF zL3{tt6S$jHD^3G3vJ<>fu&poI)@v|3d*T9o(6$ds;@Th_`_d@7M#2}9 z@n)I|o>=g#6Tx=7#ylZ4Qy*J;55Y#7>>%7uY4qDL*t(A{8maM|6sLK}MFRxc4(1oy zo-nzGU?Wkuq^}&fK7~K})G*jqO)UN?7Q!1;5XqSsT1Ns~C-!rFvEvAwKX$guM{oL^ zE-k!=T|1y>;$*$Lheux&1Q4^AFD-Q@$P;e>*Z@0a_O;4ePA)yG61*m)H8y?Yq; zZax5=wR@m`-Db$~x5A2vg^FQ|^n#7PE2XFCLuRx28z$$9GH;~8UqagT{ikR@CxDIg zjLBGRFN$3IYq7tN!KPos4L1LW*y-;IHV7A5weXlmjj!IobI|T-y9``^p9uC!!(bEo z+9njV&_q_jSP*Hhnk#pLcNA>dVobBlSzQat(`SgY`El}ViZrFdh4I+*f{o7mb$ZhE zClrL6TqoLOH%Da}2>seyOF>%`$%VgnW=(w4j{9<{;So zyfQ$ZmgF^WTYhJ9y$CS>K#vqhK?jQ&{4P`PiKckGAT~Q_bK>Vs#+J^owDp+yenVuQ zCDw-SFbEi`ala*=kHAa&i{9PK&+?p@Uwp*8^Ao3?J;f*G&Q;^{52JyMH4Fh@3^wCn zqw`4VDERICIxdSWmYcM4oMkTUfhoBNA;VxZ1~y-;ttH6Pu*;VJ=G?_%xk=$~rQeMO z|2P$XGBu$nlo5kXKiDGhcq#ovw{p&6vD{s28=L2!fCXPNg$!q~=>uE9;7pLG9eL$g z%Zr?=SS&Xyc}(fOSnv-~)K|#_CkC53*p?&MVsS#QJlzt;TBoGFLWU1 zA{NWT6gxKew^-<}Qo)y8wY-|aW&<`~{G{#m}eM zp@J_t4S~`THZZ{!jGywfAV-hNIUG7B=ONB5ELI;<)LYpvW1+uB1z&P5PTn18U<)3a zCH%iU9U@2N91ZiydW>@ki`55(1^O*4$T#GLnxN8(KpO~PqfZ(vxCLq>bIwKhlnS@p1d*nY{tY(zw0&;$)yR>VFLAD5vHF3+M`TUKf`6R~zhqi4UymT` zk6@!u`Ll{XgR2ex&bfib>Q8VRo--_LME2D%0)S|O0s*N%fldB=G9QZ=mh~j(0v4+u z3Y%!JVvXh^)oQ->h%8~m)au{2!6wz#6nrpZc;>J9?Xy_@SV3=QKaNeZ_h}Kh+)*QQ z1u*qRu!Rl3^;y35@XU&!k!kd60m`8rTZ+ zPI(X~vZkYN(1cdG2^5JFX$P!Iw}UMX!A8x_U|bqyd!Jp-xEwe%!%VeNeyc1NOOXH2 zjAsIeX6_24Wk?Ff+4_()rAUBSZ-FfVO-)YfL$h~Zwf@6+B>{`YQjq!O#P0+Q z$()JnD-Y2cOC?k7h?;}R2)SGYm$Sh}K#Lu15t^H{9LnB(h~6GBB+Hu@va?t$ZNMQY z0*0h}V^oY?6d6O!w7@(TYgM!#C&7uPj%4GSR4uTjOfAL`<^{CX9mHW=v`7Nko0*_> zJ;C1T<>y>^Q4NmY8KkxKn1lm;dpp?fsrCBVNTNU_b_5)_GjAwkk(k39F zqRdp&@=CWLsOs@^ldo6CR{ZS1eFtjj-_3*46Y(?1cTn0Wvv=AbL%c2D<5Xm^2A=*O Xc!oek1CRLa00000NkvXXu0mjfCLQ0{ literal 0 HcmV?d00001 diff --git a/pyadmitad/tests/test_announcements.py b/pyadmitad/tests/test_announcements.py index f8e485e..468825a 100644 --- a/pyadmitad/tests/test_announcements.py +++ b/pyadmitad/tests/test_announcements.py @@ -1,54 +1,79 @@ # coding: utf-8 +from __future__ import unicode_literals import unittest +import responses from pyadmitad.items import Announcements from pyadmitad.tests.base import BaseTestCase -ANNOUNCEMENTS_RESULTS = { - u'results': [ - { - u'message': u'Сотрудничество подтверждено', - u'id': 264, - u'advcampaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'event': u'request_accepted' - } - ], - u'_meta': { - u'count': 50, - u'limit': 1, - u'offset': 0 - } -} - - class AnnouncementsTestCase(BaseTestCase): def test_get_announcements_request(self): - self.set_mocker(Announcements.URL, limit=1) - result = ANNOUNCEMENTS_RESULTS - self.mocker.result(result) - self.mocker.replay() - res = self.client.Announcements.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Announcements.URL, params={ + 'limit': 1, + 'offset': 230 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 50, + 'limit': 1, + 'offset': 230 + }, + 'results': [{ + 'message': 'Message', + 'id': 264, + 'advcampaign': { + 'id': 8, + 'name': 'AdvCamp' + }, + 'event': 'request_accepted' + }] + }, + status=200 + ) + + result = self.client.Announcements.get(limit=1, offset=230) + + self.assertIn('_meta', result) + self.assertIn('results', 
result) + self.assertEqual(1, len(result['results'])) def test_get_announcements_request_with_id(self): - self.set_mocker(Announcements.SINGLE_URL, id=264, with_pagination=False) - result = ANNOUNCEMENTS_RESULTS['results'][0] - self.mocker.result(result) - self.mocker.replay() - res = self.client.Announcements.getOne(264) - self.assertEqual(res[u'id'], 264) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Announcements.SINGLE_URL, announcement_id=264), + match_querystring=True, + json={ + '_meta': { + 'count': 50, + 'limit': 1, + 'offset': 230 + }, + 'results': [{ + 'message': 'Message', + 'id': 264, + 'advcampaign': { + 'id': 8, + 'name': 'AdvCamp' + }, + 'event': 'request_accepted' + }] + }, + status=200 + ) + + result = self.client.Announcements.getOne(264) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_arecords.py b/pyadmitad/tests/test_arecords.py new file mode 100644 index 0000000..7b36ee5 --- /dev/null +++ b/pyadmitad/tests/test_arecords.py @@ -0,0 +1,72 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import Arecords +from pyadmitad.tests.base import BaseTestCase + + +class ArecordsTestCase(BaseTestCase): + + def test_get_arecords_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Arecords.URL, params={ + 'limit': 20, + 'offset': 0 + }), + match_querystring=True, + json={ + 'results': [{ + 'domain': 'somewebsite.ru', + 'website_id': 12, + 'name': 'Some website' + }, { + 'domain': 'mywebsite.kz', + 'website_id': 10, + 'name': 'My website' + }], + '_meta': { + 'limit': 20, + 'offset': 0, + 'count': 2, + } + }, + status=200 + ) + + result = self.client.Arecords.get() + + self.assertEqual(len(result['results']), 2) + + def test_get_single_arecords_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Arecords.FOR_WEBSITE_URL, website_id=10), + match_querystring=True, + json={ + 'results': [{ + 'domain': 'mywebsite.kz', + 'website_id': 10, + 'name': 'My website' + }], + '_meta': { + 'limit': 20, + 'offset': 0, + 'count': 1, + } + }, + status=200 + ) + + result = self.client.Arecords.getForWebsite(10) + + self.assertEqual(len(result['results']), 1) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_auxiliary.py b/pyadmitad/tests/test_auxiliary.py index 8fd1830..60bbce6 100644 --- a/pyadmitad/tests/test_auxiliary.py +++ b/pyadmitad/tests/test_auxiliary.py @@ -1,483 +1,323 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest +import responses + from pyadmitad.tests.base import BaseTestCase -from pyadmitad.items.auxiliary import * +from pyadmitad.items.auxiliary import WebsiteTypes, WebsiteRegions, \ + SystemLanguages, SystemCurrencies, AdvertiserServices, CampaignCategories class WebsiteTypesTestCase(BaseTestCase): def test_get_website_types_request(self): - self.set_mocker(WebsiteTypes.URL) - result = { - u'results': [ - u'website', - u'doorway', - u'contextual', - u'social_app', - u'social_group', - u'social_teaser', - u'arbitrage' - ], - u'_meta': { - u'count': 7, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteTypes.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - 
self.assertIsInstance(res[u'results'], list) - self.mocker.verify() - - def test_get_website_types_request_with_pagination(self): - self.set_mocker(WebsiteTypes.URL, offset=1, limit=2) - result = { - u'results': [ - u'doorway', - u'contextual' - ], - u'_meta': { - u'count': 7, - u'limit': 2, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteTypes.get(offset=1, limit=2) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertEqual(len(res[u'results']), 2) - _meta = res[u'_meta'] - self.assertEqual(_meta[u'count'], 7) - self.assertEqual(_meta[u'limit'], 2) - self.assertEqual(_meta[u'offset'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(WebsiteTypes.URL, params={ + 'limit': 5, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 9, + 'limit': 5, + 'offset': 0 + }, + 'results': [ + 'website', + 'doorway', + 'contextual', + 'youtube', + 'social_app', + ] + }, + status=200 + ) + + result = self.client.WebsiteTypes.get(limit=5) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(5, len(result['results'])) class WebsiteRegionsTestCase(BaseTestCase): def test_get_website_regions_request(self): - self.set_mocker(WebsiteRegions.URL) - result = { - u'results': [ - u'RU', u'UA', u'BY', u'KZ', u'DE', u'FR', u'US', u'AM', u'AU', - u'AZ', u'CA', u'EE', u'GE', u'KG', u'LV', u'LT', u'MD', u'TJ', - u'TM', u'UZ' - ], - u'_meta': { - u'count': 20, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteRegions.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_website_regions_request_with_pagination(self): - self.set_mocker(WebsiteRegions.URL, offset=1, limit=2) - result = { - u'results': [u'UA', u'BY'], - u'_meta': { - u'count': 20, - u'limit': 2, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteRegions.get(offset=1, limit=2) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(len(res[u'results']), 2) - _meta = res[u'_meta'] - self.assertEqual(_meta[u'count'], 20) - self.assertEqual(_meta[u'limit'], 2) - self.assertEqual(_meta[u'offset'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(WebsiteRegions.URL, params={ + 'limit': 2, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'limit': 2, + 'offset': 0, + 'count': 6 + }, + 'results': ['RU', 'EN'] + }, + status=200 + ) + + result = self.client.WebsiteRegions.get(limit=2) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) class SystemLanguagesTestCase(BaseTestCase): def test_get_languages_request(self): - self.set_mocker(SystemLanguages.URL) - result = { - u'results': [ - { - u'flag': u'https://admitad.com/media/images/flags/' - u'c8ef33a926799c7c3d7103212a78b187.png', - u'language': u'Русский', - u'language_code': u'ru' + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(SystemLanguages.URL, params={ + 'limit': 2, + 'offset': 0 + }), + match_querystring=True, 
+ json={ + '_meta': { + 'count': 4, + 'limit': 2, + 'offset': 1 + }, + 'results': [{ + 'flag': 'http://cdn.admitad.com/images/flags/en.svg', + 'language': 'English', + 'language_code': 'en' + }, { + 'flag': 'http://cdn.admitad.com/images/flags/de.svg', + 'language': 'Deutsch', + 'language_code': 'de' + }] }, - { - u'flag': u'', - u'language': u'Deutsch', - u'language_code': u'de' - } - ], - u'_meta': { - u'count': 2, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemLanguages.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() + status=200 + ) + + result = self.client.SystemLanguages.get(limit=2) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) def test_get_language_request_with_code(self): - self.set_mocker(SystemLanguages.SINGLE_URL, - code='ru', with_pagination=False) - result = { - u'flag': u'https://admitad.trezor.by/media/images/flags/' - u'c8ef33a926799c7c3d7103212a78b187.png', - u'language': u'Русский', - u'language_code': u'ru' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemLanguages.getOne(code='ru') - self.assertIn(u'flag', res) - self.assertIn(u'language', res) - self.assertIn(u'language_code', res) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(SystemLanguages.SINGLE_URL, code='en'), + json={ + 'flag': 'http://cdn.admitad.com/images/flags/en.svg', + 'language': 'English', + 'language_code': 'en' + }, + status=200 + ) + + result = self.client.SystemLanguages.getOne(code='en') + + self.assertIn('flag', result) + self.assertIn('language', result) + self.assertIn('language_code', result) class SystemCurrenciesTestCase(BaseTestCase): def test_get_currencies_request(self): - self.set_mocker(SystemCurrencies.URL) - result = { - u'results': [ - { - u'code': u'EUR', - u'min_sum': u'20.00', - u'name': u'Евро', - u'sign': u'€' + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(SystemCurrencies.URL, params={ + 'limit': 1, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 4, + 'limit': 1, + 'offset': 0 + }, + 'results': [{ + 'code': 'USD', + 'min_sum': '25.00', + 'name': 'American dollar', + 'sign': '$' + }] }, - { - u'code': u'RUB', - u'min_sum': u'750.00', - u'name': u'Российский рубль', - u'sign': u'руб.' - }, - { - u'code': u'USD', - u'min_sum': u'25.00', - u'name': u'Американский доллар', - u'sign': u'$' - } - ], - u'_meta': { - u'count': 3, - u'limit': 20, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemCurrencies.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_currencies_request_with_pagination(self): - self.set_mocker(SystemCurrencies.URL, offset=1, limit=1) - result = { - u'results': [ - { - u'code': u'RUB', - u'min_sum': u'750.00', - u'name': u'Российский рубль', - u'sign': u'руб.' 
- } - ], - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemCurrencies.get(offset=1, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'_meta'][u'offset'], 1) - self.mocker.verify() - - -class AdvertiserServiceTestCase(BaseTestCase): + status=200 + ) + + result = self.client.SystemCurrencies.get(limit=1) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) + + +class AdvertiserServicesTestCase(BaseTestCase): def test_get_advertiser_services(self): - self.set_mocker(AdvertiserServices.URL) - result = { - u'results': [ - { - u'allowed_referrers': u'', - u'id': 1, - u'logo': u'https://admitad.com/media/adservice/images/' - u'755c6ece4a7f2a45548737c212906434.png', - u'name': u'Yandex.Direct', - u'url': u'http://direct.yandex.ru/' + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(AdvertiserServices.URL, params={ + 'limit': 2, + 'offset': 1 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 12, + 'limit': 2, + 'offset': 1 + }, + 'results': [{ + 'allowed_referrers': '', + 'id': 4, + 'logo': 'http://cdn.admitad.com/adservice/images/f7e67e924fa05952f03e0c8c40a11651.png', + 'name': 'Google AdWords', + 'url': 'http://adwords.google.com/' + }, { + 'allowed_referrers': 'facebook.com', + 'id': 3, + 'logo': 'http://cdn.admitad.com/adservice/images/e6fee9e2ca69a2113d1339ecbe361ea5.png', + 'name': 'Facebook', + 'url': 'http://facebook.com/' + }] }, - { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - ], - u'_meta': { - u'count': 2, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_advertiser_services_with_pagination(self): - self.set_mocker(AdvertiserServices.URL, offset=1, limit=1) - result = { - u'results': [ - { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - ], - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.get(offset=1, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'_meta'][u'offset'], 1) - self.mocker.verify() + status=200 + ) + + result = self.client.AdvertiserServices.get(limit=2, offset=1) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) def test_get_advertiser_services_with_id(self): - self.set_mocker( - AdvertiserServices.SINGLE_URL, - **{'id': 2, 'with_pagination': False}) - result = { - u'allowed_referrers': u'', - u'id': 2, - u'logo': 
u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.getOne(_id=2) - self.assertEqual(res[u'id'], 2) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(AdvertiserServices.SINGLE_URL, id=3), + match_querystring=True, + json={ + 'allowed_referrers': 'facebook.com', + 'id': 3, + 'logo': 'http://cdn.admitad.com/adservice/images/e6fee9e2ca69a2113d1339ecbe361ea5.png', + 'name': 'Facebook', + 'url': 'http://facebook.com/' + }, + status=200 + ) + + result = self.client.AdvertiserServices.getOne(3) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('url', result) def test_get_advertiser_services_with_kind(self): - self.set_mocker(AdvertiserServices.KIND_URL, kind='contextual') - result = { - u'results': [ - { - u'allowed_referrers': u'', - u'id': 1, - u'logo': u'https://admitad.com/media/adservice/images/' - u'755c6ece4a7f2a45548737c212906434.png', - u'name': u'Yandex.Direct', - u'url': u'http://direct.yandex.ru/' + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(AdvertiserServices.KIND_URL, kind='website', params={ + 'limit': 1, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 10, + 'limit': 20, + 'offset': 0 + }, + 'results': [{ + 'allowed_referrers': 'facebook.com', + 'id': 3, + 'logo': 'http://cdn.admitad.com/adservice/images/e6fee9e2ca69a2113d1339ecbe361ea5.png', + 'name': 'Facebook', + 'url': 'http://facebook.com/' + }] }, - { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - ], - u'_meta': { - u'count': 2, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.getForKind(kind='contextual') - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_advertiser_services_with_kind_and_id(self): - self.set_mocker(AdvertiserServices.KIND_SINGLE_URL, - id=2, kind='contextual', with_pagination=False) - result = { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.getForKindOne(2, 'contextual') - self.assertEqual(res[u'id'], 2) - self.mocker.verify() + status=200 + ) + + result = self.client.AdvertiserServices.getForKind('website', limit=1) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) class CampaignsCategoriesTestCase(BaseTestCase): def test_get_campaigns_categories(self): - self.set_mocker(CampaignCategories.URL) - result = { - u'results': [ - { - u'id': 3, - u'name': u'Браузерные', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игр', - u'parent': None - } + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignCategories.URL, params={ + 'limit': 2, + 'offset': 1, + 'order_by': 'name' + }), + match_querystring=True, + json={ + '_meta': { + 'count': 12, + 'limit': 2, + 'offset': 1 + 
}, + 'results': [{ + 'id': 13, + 'language': 'en', + 'name': 'MobileCategory', + 'parent': None + }, { + 'id': 33, + 'language': 'en', + 'name': 'ZooCategory', + 'parent': None + }] }, - { - u'id': 5, - u'name': u'Другая', - u'parent': None - }, - { - u'id': 4, - u'name': u'Клиентские', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - } - ], - u'_meta': { - u'count': 3, - u'limit': 20, - u'offset': 0 - } - } - - self.mocker.result(result) - self.mocker.replay() - res = self.client.CampaignCategories.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_campaigns_categories_with_pagination(self): - self.set_mocker(CampaignCategories.URL, limit=3) - result = { - u'results': [ - { - u'id': 3, - u'name': u'Браузерные', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игр', - u'parent': None - } - }, - { - u'id': 5, - u'name': u'Другая', - u'parent': None - }, - { - u'id': 4, - u'name': u'Клиентские', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - } - ], - u'_meta': { - u'count': 3, - u'limit': 3, - u'offset': 0 - } - } - - self.mocker.result(result) - self.mocker.replay() - res = self.client.CampaignCategories.get(limit=3) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 3) - self.mocker.verify() + status=200 + ) + + result = self.client.CampaignCategories.get(limit=2, offset=1, order_by='name') + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) def test_get_campaigns_categories_with_id(self): - self.set_mocker( - CampaignCategories.SINGLE_URL, id=3, with_pagination=False) - result = { - u'id': 3, - u'name': u'Браузерные', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игр', - u'parent': None - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.CampaignCategories.getOne(3) - self.assertEqual(res[u'id'], 3) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignCategories.SINGLE_URL, id='13'), + match_querystring=True, + json={ + 'id': 13, + 'language': 'en', + 'name': 'MobileCategory', + 'parent': None + }, + status=200 + ) + + result = self.client.CampaignCategories.getOne(13) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('parent', result) + self.assertIn('language', result) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_banners.py b/pyadmitad/tests/test_banners.py index d14516e..b1e77d3 100644 --- a/pyadmitad/tests/test_banners.py +++ b/pyadmitad/tests/test_banners.py @@ -1,6 +1,9 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest +import responses + from pyadmitad.items import Banners, BannersForWebsite from pyadmitad.tests.base import BaseTestCase @@ -8,89 +11,45 @@ class BannersTestCase(BaseTestCase): def test_get_banners_request(self): - self.set_mocker(Banners.URL, id=6, limit=1) - result = { - u'_meta': { - u'count': 5, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'banner_image': u'https://admitad.com/media/image.png', - u'creation_date': u'2013-01-18 20:13:27', - u'flashobj_url': u'', - u'id': 1, - u'image_url': u'', - u'is_flash': False, - u'name': u'Gmail Banner', - 
u'size_height': 39, - u'size_width': 94, - u'traffic_url': u'', - u'type': u'jpeg' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Banners.get(6, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Banners.URL, campaign_id=12, params={ + 'limit': 40, + 'offset': 10, + 'mobile_content': 'true' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Banners.get(12, mobile_content=True, limit=40, offset=10) + + self.assertIn('status', result) class BannersForWebsiteTestCase(BaseTestCase): def test_get_banners_request(self): - self.set_mocker(BannersForWebsite.URL, id=6, w_id=22, limit=1) - result = { - u'_meta': { - u'count': 5, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'banner_image': u'https://admitad.com/media/image.png', - u'creation_date': u'2011-01-13 20:13:27', - u'direct_link': u'http://ad.admitad.com/goto/XXXXX/', - u'flashobj_url': u'', - u'html_code': { - u'async': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'flash': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'full': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'image': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'sync': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - }, - u'id': 1, - u'image_url': u'', - u'is_flash': False, - u'name': u'Gmail Banner', - u'size_height': 39, - u'size_width': 94, - u'traffic_url': u'', - u'type': u'jpeg' - } - ] - } - - self.mocker.result(result) - self.mocker.replay() - res = self.client.BannersForWebsite.get(6, 22, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BannersForWebsite.URL, campaign_id=12, website_id=10, params={ + 'limit': 40, + 'offset': 10, + 'mobile_content': 'true', + 'landing': 6, + 'uri_scheme': 'https' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.BannersForWebsite.get(12, 10, mobile_content=True, + landing=6, uri_scheme='https', + limit=40, offset=10) + + self.assertIn('status', result) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_base.py b/pyadmitad/tests/test_base.py new file mode 100644 index 0000000..55eaa3b --- /dev/null +++ b/pyadmitad/tests/test_base.py @@ -0,0 +1,166 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +from datetime import datetime, date + +from pyadmitad.items.base import Item +from pyadmitad.tests.base import BaseTestCase +from pyadmitad.constants import BASE_URL + + +class ItemTestCase(BaseTestCase): + + def test_sanitize_id(self): + self.assertEqual(Item.sanitize_id(2, ''), 2) + self.assertEqual(Item.sanitize_id(2**64, ''), 2**64) + self.assertEqual(Item.sanitize_id('64', ''), '64') + + with self.assertRaises(ValueError): + Item.sanitize_id(0, '') + Item.sanitize_id(None, '') + Item.sanitize_id(3.14, '') + Item.sanitize_id('foo', '') + + def 
test_sanitize_fields(self): + fields = { + 'field1': lambda x: Item.sanitize_non_blank_value(x, ''), + 'field2': lambda x: Item.sanitize_integer_value(x, ''), + 'field3': lambda x: Item.sanitize_string_value(x, '', blank=True), + } + + data = Item.sanitize_fields(fields, field1='foobarbaz', field2=42, field3='') + + self.assertDictEqual(data, { + 'field1': 'foobarbaz', + 'field2': 42, + 'field3': '' + }) + + data = Item.sanitize_fields(fields, field1='foobarbaz', field2=42, field3='', field4='another') + + self.assertDictEqual(data, { + 'field1': 'foobarbaz', + 'field2': 42, + 'field3': '' + }) + + def test_sanitize_non_blank_value(self): + self.assertEqual(Item.sanitize_non_blank_value(0, ''), 0) + self.assertEqual(Item.sanitize_non_blank_value('a', ''), 'a') + self.assertListEqual(Item.sanitize_non_blank_value([1], ''), [1]) + self.assertDictEqual(Item.sanitize_non_blank_value({'a': 1}, ''), {'a': 1}) + self.assertTupleEqual(Item.sanitize_non_blank_value((1, 2), ''), (1, 2)) + + with self.assertRaises(ValueError): + Item.sanitize_non_blank_value('', '') + Item.sanitize_non_blank_value([], '') + Item.sanitize_non_blank_value({}, '') + Item.sanitize_non_blank_value((), '') + Item.sanitize_non_blank_value(None, '') + + def test_sanitize_string_value(self): + self.assertEqual(Item.sanitize_string_value('foo', '', 10, None, False), 'foo') + self.assertEqual(Item.sanitize_string_value('foo', '', None, 2, False), 'foo') + self.assertEqual(Item.sanitize_string_value('foobarbaz', '', 10, 5, False), 'foobarbaz') + self.assertEqual(Item.sanitize_string_value('', '', None, None, True), '') + + with self.assertRaises(ValueError): + Item.sanitize_string_value('', '', None, None, False) + Item.sanitize_string_value('foo', '', 2, None, False) + Item.sanitize_string_value('foo', '', None, 5, False) + Item.sanitize_string_value('foobarbaz', '', 5, 6, False) + + def test_sanitize_integer_value(self): + self.assertEqual(Item.sanitize_integer_value(2, '', False), 2) + self.assertEqual(Item.sanitize_integer_value(0, '', False), 0) + self.assertEqual(Item.sanitize_integer_value(None, '', True), None) + self.assertEqual(Item.sanitize_integer_value(2**64, '', False), 2**64) + self.assertEqual(Item.sanitize_integer_value('64', '', False), '64') + + with self.assertRaises(ValueError): + Item.sanitize_integer_value(None, '', False) + Item.sanitize_integer_value(3.14, '', False) + Item.sanitize_integer_value('foo', '', False) + + def test_sanitize_float_value(self): + self.assertEqual(Item.sanitize_float_value(1, '', False), 1) + self.assertEqual(Item.sanitize_float_value(0, '', False), 0) + self.assertEqual(Item.sanitize_float_value('12', '', False), '12') + self.assertEqual(Item.sanitize_float_value('3.14', '', False), '3.14') + self.assertEqual(Item.sanitize_float_value(3.14, '', False), 3.14) + self.assertEqual(Item.sanitize_float_value(None, '', True), None) + + with self.assertRaises(ValueError): + Item.sanitize_float_value(None, '', False) + Item.sanitize_float_value('foo', '', False) + + def test_sanitize_integer_array(self): + self.assertEqual(Item.sanitize_integer_array(None, '', True), None) + self.assertEqual(Item.sanitize_integer_array([], '', True), []) + self.assertListEqual(Item.sanitize_integer_array([0, 1, '12'], '', False), [0, 1, '12']) + self.assertListEqual(Item.sanitize_integer_array([5, None, '1', None], '', True), [5, None, '1', None]) + self.assertListEqual(Item.sanitize_integer_array(5, ''), [5]) + + with self.assertRaises(ValueError): + Item.sanitize_integer_array(None, '', False) + 
Item.sanitize_integer_array([], '', False) + Item.sanitize_integer_array([1, 2, 3, None, 5], '', False) + + def test_sanitize_string_array(self): + self.assertEqual(Item.sanitize_string_array(None, '', None, None, True), None) + self.assertListEqual(Item.sanitize_string_array([], '', None, None, True), []) + self.assertListEqual(Item.sanitize_string_array('foo', ''), ['foo']) + self.assertListEqual(Item.sanitize_string_array([''], '', None, None, True), ['']) + self.assertListEqual(Item.sanitize_string_array(['foo', 'bar'], '', 10, 2, False), ['foo', 'bar']) + self.assertListEqual(Item.sanitize_string_array(['foo', 'bar'], '', None, None, False), ['foo', 'bar']) + + with self.assertRaises(ValueError): + Item.sanitize_string_array(None, '', False) + Item.sanitize_string_array([], '', False) + Item.sanitize_string_array([''], '', False) + Item.sanitize_string_array(['foobarbaz'], '', 5, 3, False) + Item.sanitize_string_array(['foobarbaz'], '', 5, None, False) + Item.sanitize_string_array(['foo'], '', None, 5, False) + + def test_sanitize_currency(self): + self.assertEqual(Item.sanitize_currency_value(None, True), None) + self.assertEqual(Item.sanitize_currency_value('', True), '') + self.assertEqual(Item.sanitize_currency_value('usd', False), 'USD') + self.assertEqual(Item.sanitize_currency_value('EUR', False), 'EUR') + + with self.assertRaises(ValueError): + Item.sanitize_currency_value(None, False) + Item.sanitize_currency_value('', False) + Item.sanitize_currency_value('foobarbaz', True) + Item.sanitize_currency_value('12', True) + + def test_sanitize_date(self): + self.assertEqual(Item.sanitize_date(None, '', True), None) + self.assertEqual(Item.sanitize_date(datetime(2020, 1, 1), '', False), '01.01.2020') + self.assertEqual(Item.sanitize_date(date(2020, 1, 1), '', False), '01.01.2020') + self.assertEqual(Item.sanitize_date('01.01.2020', '', False), '01.01.2020') + + with self.assertRaises(ValueError): + Item.sanitize_date(None, '', False) + Item.sanitize_date('01/01/2020', '', True) + + def test_sanitize_long_date(self): + self.assertEqual(Item.sanitize_long_date(None, '', True), None) + self.assertEqual(Item.sanitize_long_date(datetime(2020, 1, 1, 11, 20, 36), '', False), '01.01.2020 11:20:36') + self.assertEqual(Item.sanitize_long_date('01.01.2020 11:20:36', '', False), '01.01.2020 11:20:36') + + with self.assertRaises(ValueError): + Item.sanitize_long_date(None, '', False) + Item.sanitize_long_date('01/01/2020', '', True) + Item.sanitize_long_date('01.01.2020 11/22/22', '', False) + + def test_prepare_url(self): + self.assertEqual(Item.prepare_url('somepath'), '%ssomepath/' % BASE_URL) + self.assertEqual(Item.prepare_url('somepath/'), '%ssomepath/' % BASE_URL) + self.assertEqual(Item.prepare_url('/somepath'), '%ssomepath/' % BASE_URL) + self.assertEqual(Item.prepare_url('/somepath/'), '%ssomepath/' % BASE_URL) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_broken_links.py b/pyadmitad/tests/test_broken_links.py new file mode 100644 index 0000000..8ba03ec --- /dev/null +++ b/pyadmitad/tests/test_broken_links.py @@ -0,0 +1,73 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import BrokenLinks, ManageBrokenLinks +from pyadmitad.tests.base import BaseTestCase + + +class BrokenLinksTestCase(BaseTestCase): + + def test_get_broken_links_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BrokenLinks.URL, params={ + 'limit': 50, + 
'offset': 2, + 'website': [1, 2, 3], + 'campaign': [1, 2], + 'search': 'some', + 'reason': 0, + 'date_start': '01.01.2010', + 'date_end': '01.01.2020' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.BrokenLinks.get( + website=[1, 2, 3], campaign=[1, 2], + search='some', reason=0, + date_start='01.01.2010', date_end='01.01.2020', + limit=50, offset=2 + ) + + self.assertIn('status', result) + + def test_get_single_broken_link_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BrokenLinks.SINGLE_URL, broken_link_id=10), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.BrokenLinks.getOne(10) + + self.assertIn('status', result) + + +class ManageBrokenLinksTestCase(BaseTestCase): + + def test_resolve_broken_link_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(ManageBrokenLinks.RESOLVE_URL), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.ManageBrokenLinks.resolve([10, 20]) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_campaigns.py b/pyadmitad/tests/test_campaigns.py index b040209..e3f857a 100644 --- a/pyadmitad/tests/test_campaigns.py +++ b/pyadmitad/tests/test_campaigns.py @@ -1,310 +1,109 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest -from pyadmitad.items import Campaigns, CampaignsForWebsite,\ +import responses + +from pyadmitad.items import Campaigns, CampaignsForWebsite, \ CampaignsManage from pyadmitad.tests.base import BaseTestCase -CAMPAIGNS_RESULT = { - "results": [ - { - "status": "active", - "rating": "5.00", - "description": "Gmail is a mail service by google", - "actions": [ - { - "payment_size": "50.00", - "hold_time": 120, - "percentage": True, - "name": "action name", - "id": 1 - }, - { - "payment_size": "12.00", - "hold_time": 30, - "percentage": True, - "name": "Покупка", - "id": 15 - }, - { - "payment_size": "11.00", - "hold_time": 15, - "percentage": False, - "name": "Регистрация", - "id": 11 - } - ], - "site_url": "http://www.gmail.com/", - "regions": [ - { - "region": "01" - }, - { - "region": "BY" - }, - { - "region": "CA" - }, - { - "region": "DE" - }, - { - "region": "KZ" - }, - { - "region": "RU" - }, - { - "region": "US" - } - ], - "currency": "USD", - "cr": None, - "ecpc": None, - "id": 6, - "categories": [ - { - "name": "Магазин", - "parent": None, - "id": 1 - }, - { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - { - "name": "Браузерные", - "parent": { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - "id": 3 - } - ], - "name": "Campaign2" - } - ], - "_meta": { - "count": 4, - "limit": 1, - "offset": 0 - } -} - -CAMPAIGNS_FOR_WEBSITE_RESULT = { - "results": [ - { - "status": "active", - "rating": "5.00", - "traffics": [ - { - "enabled": False, - "name": "Тип 1", - "id": 1 - }, - { - "enabled": False, - "name": "Тип 2", - "id": 2 - } - ], - "ecpc": None, - "description": "Gmail is a mail service by google", - "name": "AdvCamp 1", - "gotolink": "http://ad.admitad.com/goto/some_link/", - "avg_hold_time": None, - "actions": [ - { - "payment_size": "50.00", - "hold_time": 120, - "percentage": None, - "name": "action name", - "id": 1 - }, - { - "payment_size": "12.00", - "hold_time": 30, - "percentage": True, - "name": "Покупка", - "id": 15 - }, - { - "payment_size": "11.00", - "hold_time": 15, - "percentage": 
True, - "name": "Регистрация", - "id": 11 - } - ], - "site_url": "http://www.gmail.com/", - "regions": [ - { - "region": "01" - }, - { - "region": "BY" - }, - { - "region": "CA" - }, - { - "region": "DE" - }, - { - "region": "KZ" - }, - { - "region": "RU" - }, - { - "region": "US" - } - ], - "currency": "USD", - "goto_cookie_lifetime": 45, - "geotargeting": True, - "cr": None, - "activation_date": "2010-03-31 19:05:39", - "max_hold_time": 120, - "id": 6, - "categories": [ - { - "name": "Магазин", - "parent": None, - "id": 1 - }, - { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - { - "name": "Браузерные", - "parent": { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - "id": 3 - }, - { - "name": "Другая", - "parent": None, - "id": 5 - }, - { - "name": "Финансы", - "parent": { - "name": "Другая", - "parent": None, - "id": 5 - }, - "id": 6 - }, - { - "name": "Подкатегория", - "parent": { - "name": "Другая", - "parent": None, - "id": 5 - }, - "id": 17 - } - ], - "percentage_of_confirmed": None - } - ], - "_meta": { - "count": 4, - "limit": 1, - "offset": 0 - } -} - -CAMPAIGN_CONNECT_RESULT = { - "message": "Заявка на добавление кампании Campaign успешно создана.", - "success": "OK" -} - -CAMPAIGN_DISCONNECT_RESULT = { - "message": "Кампания Campaign была удалена из ваших предложений." - " Вы можете позже добавить ее снова.", - "success": "Deleted" -} - - class CampaignsTestCase(BaseTestCase): def test_get_campaigns_request(self): - self.set_mocker(Campaigns.URL, limit=1) - self.mocker.result(CAMPAIGNS_RESULT) - self.mocker.replay() - res = self.client.Campaigns.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Campaigns.URL, params={ + 'website': 10, + 'has_tool': ['deeplink', 'retag'], + 'limit': 10, + 'offset': 0, + 'language': 'en' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Campaigns.get(website=10, has_tool=['deeplink', 'retag'], + limit=10, offset=0, language='en') + + self.assertIn('status', result) def test_get_campaigns_request_with_id(self): - self.set_mocker(Campaigns.SINGLE_URL, id=6, with_pagination=False) - self.mocker.result(CAMPAIGNS_RESULT['results'][0]) - self.mocker.replay() - res = self.client.Campaigns.getOne(6) - self.assertEqual(res[u'id'], 6) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Campaigns.SINGLE_URL, campaign_id=10), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Campaigns.getOne(10) + + self.assertIn('status', result) class CampaignsForWebsiteTestCase(BaseTestCase): def test_get_campaigns_for_websites_request(self): - self.set_mocker(CampaignsForWebsite.URL, id=22, limit=1) - self.mocker.result(CAMPAIGNS_FOR_WEBSITE_RESULT) - self.mocker.replay() - res = self.client.CampaignsForWebsite.get(22, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignsForWebsite.URL, website_id=16, params={ + 'limit': 26, + 'offset': 10 + }), + match_querystring=True, 
+ json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsForWebsite.get(16, limit=26, offset=10) + + self.assertIn('status', result) def test_get_campaigns_request_with_id(self): - self.set_mocker( - CampaignsForWebsite.SINGLE_URL, id=22, - c_id=6, with_pagination=False) - self.mocker.result(CAMPAIGNS_FOR_WEBSITE_RESULT['results'][0]) - self.mocker.replay() - res = self.client.CampaignsForWebsite.getOne(22, 6) - self.assertEqual(res[u'id'], 6) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignsForWebsite.SINGLE_URL, website_id=10, campaign_id=88), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsForWebsite.getOne(10, 88) + + self.assertIn('status', result) class CampaignsConnectWebsiteTestCase(BaseTestCase): def test_campaign_connect_websites_request(self): - self.set_mocker(CampaignsManage.CONNECT_URL, w_id=22, - c_id=6, with_pagination=False, method='POST') - self.mocker.result(CAMPAIGN_CONNECT_RESULT) - self.mocker.replay() - res = self.client.CampaignsManage.connect(c_id=6, w_id=22) - self.assertIn(u'message', res) - self.assertIn(u'success', res) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(CampaignsManage.CONNECT_URL, campaign_id=10, website_id=22), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsManage.connect(10, 22) + + self.assertIn('status', result) def test_campaign_disconnect_websites_request(self): - self.set_mocker(CampaignsManage.DISCONNECT_URL, w_id=22, - c_id=6, with_pagination=False, method='POST') - self.mocker.result(CAMPAIGN_CONNECT_RESULT) - self.mocker.replay() - res = self.client.CampaignsManage.disconnect(c_id=6, w_id=22) - self.assertIn(u'message', res) - self.assertIn(u'success', res) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(CampaignsManage.DISCONNECT_URL, campaign_id=10, website_id=22), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsManage.disconnect(10, 22) + + self.assertIn('status', result) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_coupons.py b/pyadmitad/tests/test_coupons.py index d9caaf4..992fd24 100644 --- a/pyadmitad/tests/test_coupons.py +++ b/pyadmitad/tests/test_coupons.py @@ -1,222 +1,123 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest -from pyadmitad.items import Coupons, CouponsForWebsite +import responses + +from pyadmitad.items import Coupons, CouponsForWebsite, CouponsCategories from pyadmitad.tests.base import BaseTestCase class CouponsTestCase(BaseTestCase): def test_get_coupons_request(self): - self.set_mocker(Coupons.URL, limit=1) - result = { - u'results': [ - { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - ], - u'_meta': { - 
u'count': 6, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Coupons.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Coupons.URL, params={ + 'campaign': [1, 5, 6], + 'campaign_category': [11, 12], + 'category': [22, 23], + 'type': 'some', + 'limit': 10, + 'offset': 0, + 'order_by': ['name', '-rating'] + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Coupons.get( + campaign=[1, 5, 6], campaign_category=[11, 12], + category=[22, 23], type='some', limit=10, offset=0, + order_by=['name', '-rating']) + + self.assertIn('status', result) def test_get_coupons_request_with_id(self): - self.set_mocker(Coupons.SINGLE_URL, id=1, with_pagination=False) - result = { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Coupons.getOne(1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Coupons.SINGLE_URL, coupon_id=42), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Coupons.getOne(42) + + self.assertIn('status', result) class CouponsForWebsiteTestCase(BaseTestCase): def test_get_coupons_for_website_request(self): - self.set_mocker(CouponsForWebsite.URL, id=3, limit=1) - result = { - u'results': [ - { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - ], - u'_meta': { - u'count': 6, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.CouponsForWebsite.get(_id=3, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsForWebsite.URL, website_id=1, params={ + 'campaign': [1, 5, 6], + 'campaign_category': [11, 12], + 'category': [22, 23], + 'type': 'some', + 
'limit': 10, + 'offset': 0, + 'order_by': ['name', '-rating'] + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsForWebsite.get( + 1, campaign=[1, 5, 6], campaign_category=[11, 12], + category=[22, 23], type='some', limit=10, offset=0, + order_by=['name', '-rating']) + + self.assertIn('status', result) def test_get_coupons_for_website_request_with_id(self): - self.set_mocker( - CouponsForWebsite.SINGLE_URL, id=3, c_id=1, with_pagination=False) - result = { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.CouponsForWebsite.getOne(3, 1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsForWebsite.SINGLE_URL, website_id=10, campaign_id=20), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsForWebsite.getOne(10, 20) + + self.assertIn('status', result) + + +class CouponsCategoriesTestCase(BaseTestCase): + + def test_get_categories_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsCategories.URL, params={ + 'limit': 10, + 'offset': 0 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsCategories.get(limit=10, offset=0) + + self.assertIn('status', result) + + def test_get_categorty_with_id_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsCategories.SINGLE_URL, coupon_category_id=200), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsCategories.getOne(200) + self.assertIn('status', result) if __name__ == '__main__': unittest.main() diff --git a/pyadmitad/tests/test_deeplinks.py b/pyadmitad/tests/test_deeplinks.py new file mode 100644 index 0000000..7043d20 --- /dev/null +++ b/pyadmitad/tests/test_deeplinks.py @@ -0,0 +1,32 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.tests.base import BaseTestCase +from pyadmitad.items import DeeplinksManage + + +class DeeplinksManageTestCase(BaseTestCase): + + def test_deeplinks_create_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(DeeplinksManage.CREATE_URL, website_id=9, campaign_id=10, params={ + 'subid': '0987654321234567890', + 'ulp': 'https://google.com/' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.DeeplinksManage.create(9, 10, subid='0987654321234567890', ulp='https://google.com/') + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_landings.py b/pyadmitad/tests/test_landings.py new file mode 100644 index 0000000..507b62e --- /dev/null +++ 
b/pyadmitad/tests/test_landings.py @@ -0,0 +1,52 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import Landings, LandingsForWebsite +from pyadmitad.tests.base import BaseTestCase + + +class LandingsTestCase(BaseTestCase): + + def test_landings_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Landings.URL, campaign_id=8, params={ + 'limit': 2, + 'offset': 0 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Landings.get(8, limit=2, offset=0) + + self.assertIn('status', result) + + +class LandingsForWebsiteTestCase(BaseTestCase): + + def test_landings_for_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LandingsForWebsite.URL, campaign_id=8, website_id=11, params={ + 'limit': 1, + 'offset': 0 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.LandingsForWebsite.get(8, 11, limit=1) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_links.py b/pyadmitad/tests/test_links.py new file mode 100644 index 0000000..59fbf5f --- /dev/null +++ b/pyadmitad/tests/test_links.py @@ -0,0 +1,35 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import LinksValidator +from pyadmitad.tests.base import BaseTestCase + + +class LinksValidationTestCase(BaseTestCase): + + def test_link_validation_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LinksValidator.URL, params={ + 'link': 'https://google.com/' + }), + match_querystring=True, + json={ + 'message': 'Link tested.', + 'success': 'Accepted' + }, + status=200 + ) + + result = self.client.LinksValidator.get('https://google.com/') + + self.assertIn('message', result) + self.assertIn('success', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_lost_orders.py b/pyadmitad/tests/test_lost_orders.py new file mode 100644 index 0000000..b728342 --- /dev/null +++ b/pyadmitad/tests/test_lost_orders.py @@ -0,0 +1,84 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import LostOrders, LostOrdersManager +from pyadmitad.tests.base import BaseTestCase + + +class LostOrdersTestCase(BaseTestCase): + + def test_get_lost_orders_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LostOrders.URL, params={ + 'limit': 20, + 'offset': 1 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrders.get( + limit=20, + offset=1 + ) + + self.assertIn('status', result) + + def test_get_lost_order_by_id_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LostOrders.SINGLE_URL, lost_order_id=12), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrders.getOne(12) + + self.assertIn('status', result) + + +class LostOrdersManagerTestCase(BaseTestCase): + + def test_create_lost_order(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(LostOrdersManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrdersManager.create( + 
attachments=['./pyadmitad/tests/data/image.png'], + website=10, + advcampaign=20, + order_id='asd3f3', + order_date='01.01.2010', + order_price=1200, + comment='foo bar baz' + ) + + self.assertIn('status', result) + + def test_delete_lost_order(self): + with responses.RequestsMock() as resp: + resp.add( + resp.DELETE, + self.prepare_url(LostOrdersManager.DELETE_URL, lost_order_id=2), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrdersManager.delete(2) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_me.py b/pyadmitad/tests/test_me.py index 4469e09..70efa32 100644 --- a/pyadmitad/tests/test_me.py +++ b/pyadmitad/tests/test_me.py @@ -1,54 +1,147 @@ +# coding: utf-8 +from __future__ import unicode_literals + import unittest -from pyadmitad.items import * +import responses + +from pyadmitad.items import Me, Balance, PaymentsSettings from pyadmitad.tests.base import BaseTestCase class MeTestCase(BaseTestCase): def test_me_request(self): - self.set_mocker(Me.URL, with_pagination=False) - result = { - 'username': 'username', - 'first_name': 'first_name', - 'last_name': 'last_name', - 'id': 1, - 'language': 'ru' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Me.get() - self.assertEqual(res['username'], 'username') - self.assertEqual(res['first_name'], 'first_name') - self.assertEqual(res['id'], 1) - self.assertEqual(res['language'], 'ru') - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Me.URL), + json={ + 'id': 1, + 'username': 'username', + 'first_name': 'first_name', + 'last_name': 'last_name', + 'language': 'ru' + }, + status=200 + ) + + result = self.client.Me.get() + + self.assertEqual(result['id'], 1) + self.assertEqual(result['username'], 'username') + self.assertEqual(result['first_name'], 'first_name') + self.assertEqual(result['last_name'], 'last_name') + self.assertEqual(result['language'], 'ru') class BalanceTestCase(BaseTestCase): def test_balance_request(self): - self.set_mocker(Balance.URL, with_pagination=False) - result = [ - { - 'currency': 'USD', - 'balance': '20000.00' - }, - { - 'currency': 'EUR', - 'balance': '0.00' - }, - { - 'currency': 'RUB', - 'balance': '0.00' - } - ] - self.mocker.result(result) - self.mocker.replay() - res = self.client.Balance.get() - self.assertEqual(len(res), 3) - self.assertIn('balance', res[0]) - self.assertIn('currency', res[0]) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Balance.URL), + json=[{ + 'currency': 'USD', + 'balance': '20000.00' + }, { + 'currency': 'EUR', + 'balance': '0.00' + }], + status=200 + ) + + result = self.client.Balance.get() + + self.assertEqual(len(result), 2) + for item in result: + self.assertIn('balance', item) + self.assertIn('currency', item) + + def test_balance_extended_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Balance.EXTENDED_URL), + json=[{ + 'currency': 'USD', + 'balance': '20000.00', + 'processing': '20.00', + 'today': '0.00', + 'stalled': '100.00' + }, { + 'currency': 'EUR', + 'balance': '0.00', + 'processing': '2100.00', + 'today': '0.00', + 'stalled': '0.00' + }], + status=200 + ) + + result = self.client.Balance.get(extended=True) + + self.assertEqual(len(result), 2) + for item in result: + self.assertIn('balance', item) + self.assertIn('currency', item) + self.assertIn('processing', 
item) + self.assertIn('today', item) + self.assertIn('stalled', item) + + +class PaymentsSettingsTestCase(BaseTestCase): + + def test_payments_settings_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(PaymentsSettings.URL), + json=[{ + 'id': 11, + 'name': 'some_name', + 'currency': ['USD'], + 'withdrawal_type': 'webmoney' + }, { + 'id': 18, + 'name': 'some_another', + 'currency': ['EUR'], + 'withdrawal_type': 'paypal' + }], + status=200 + ) + + result = self.client.PaymentsSettings.get() + + self.assertEqual(len(result), 2) + for item in result: + self.assertIn('id', item) + self.assertIn('name', item) + self.assertIn('currency', item) + self.assertIn('withdrawal_type', item) + + def test_payments_settings_usd_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(PaymentsSettings.CURRENCY_URL, currency='USD'), + json=[{ + 'id': 11, + 'name': 'some_name', + 'currency': ['USD'], + 'withdrawal_type': 'webmoney' + }], + status=200 + ) + + result = self.client.PaymentsSettings.get(currency='USD') + + self.assertEqual(len(result), 1) + for item in result: + self.assertIn('id', item) + self.assertIn('name', item) + self.assertIn('currency', item) + self.assertIn('withdrawal_type', item) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_news.py b/pyadmitad/tests/test_news.py new file mode 100644 index 0000000..e535d2d --- /dev/null +++ b/pyadmitad/tests/test_news.py @@ -0,0 +1,91 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import News +from pyadmitad.tests.base import BaseTestCase + + +class AnnouncementsTestCase(BaseTestCase): + + def test_get_announcements_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(News.URL, params={ + 'limit': 2, + 'offset': 2 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 50, + 'limit': 2, + 'offset': 2 + }, + 'results': [{ + 'id': 12, + 'url': '', + 'language': 'en', + 'content': '
full text
', + 'short_content': 'short text', + 'advcampaign': { + 'id': 18, + 'name': 'AdvCamp' + }, + 'datetime': '2009-12-02T23:08:45' + }, { + 'id': 16, + 'url': '', + 'language': 'en', + 'content': '
full text 2
', + 'short_content': 'short text 2', + 'advcampaign': { + 'id': 18, + 'name': 'AdvCamp' + }, + 'datetime': '2009-12-02T23:09:00' + }] + }, + status=200 + ) + + result = self.client.News.get(limit=2, offset=2) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) + + def test_get_announcements_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(News.SINGLE_URL, news_id=12), + match_querystring=True, + json={ + 'id': 12, + 'url': '', + 'language': 'en', + 'content': '
full text
', + 'short_content': 'short text', + 'advcampaign': { + 'id': 18, + 'name': 'AdvCamp' + }, + 'datetime': '2009-12-02T23:08:45' + }, + status=200 + ) + + result = self.client.News.getOne(12) + + self.assertIn('id', result) + self.assertIn('url', result) + self.assertIn('content', result) + self.assertIn('datetime', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_optcodes.py b/pyadmitad/tests/test_optcodes.py new file mode 100644 index 0000000..bb61e28 --- /dev/null +++ b/pyadmitad/tests/test_optcodes.py @@ -0,0 +1,137 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import OptCodes, OfferStatusOptCodesManager, ActionOptCodesManager +from pyadmitad.tests.base import BaseTestCase + + +class OptCodeTestCase(BaseTestCase): + + def test_get_opt_codes_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(OptCodes.URL, params={ + 'campaign': 10, + 'website': 20, + 'limit': 1, + 'offset': 0, + 'order_by': ['method'] + }), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OptCodes.get( + campaign=10, + website=20, + limit=1, + offset=0, + order_by=['method'] + ) + + self.assertIn('status', result) + + def test_get_opt_code_by_id_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(OptCodes.SINGLE_URL, optcode_id=12), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OptCodes.getOne(12) + + self.assertIn('status', result) + + +class OffserStatusOptCodesManagerTestCase(BaseTestCase): + + def test_create_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(OfferStatusOptCodesManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OfferStatusOptCodesManager.create( + website=10, + campaign=20, + desc_mode=1, + url='https://google.com', + method=1 + ) + + self.assertIn('status', result) + + def test_update_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(OfferStatusOptCodesManager.UPDATE_URL, optcode_id=2), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OfferStatusOptCodesManager.update( + 2, + desc_mode=2, + url='https://google.com/', + method=2 + ) + + self.assertIn('status', result) + + +class ActionOptCodesManagerTestCase(BaseTestCase): + + def test_create_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(ActionOptCodesManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.ActionOptCodesManager.create( + website=10, + campaign=20, + desc_mode=1, + url='https://google.com', + method=1, + action_type=1, + status=1 + ) + + self.assertIn('status', result) + + def test_update_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(ActionOptCodesManager.UPDATE_URL, optcode_id=77), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.ActionOptCodesManager.update( + 77, + desc_mode=2, + url='https://google.com/', + method=2, + action_type=2, + status=1 + ) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_payments.py b/pyadmitad/tests/test_payments.py index 
62c406e..14c4c00 100644 --- a/pyadmitad/tests/test_payments.py +++ b/pyadmitad/tests/test_payments.py @@ -1,115 +1,106 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest -from pyadmitad.items import Payments, PaymentsManage +import responses + +from pyadmitad.items import Payments, PaymentsStatement, PaymentsManage +from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET from pyadmitad.tests.base import BaseTestCase class PaymentsTestCase(BaseTestCase): def test_get_payments_request(self): - self.set_mocker(Payments.URL, limit=1) - result = { - u'_meta': { - u'count': 6, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'comment': u'', - u'currency': u'USD', - u'datetime': u'2012-05-27 19:45:07', - u'id': 68, - u'payment_sum': u'2000.00', - u'status': u'pending', - u'withdrawal_type': u'webmoney' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Payments.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'results'][0][u'currency'], u'USD') - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Payments.URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Payments.get() + + self.assertIn('status', result) def test_get_payments_request_with_id(self): - self.set_mocker(Payments.SINGLE_URL, id=68, with_pagination=False) - result = { - u'comment': u'', - u'currency': u'USD', - u'datetime': u'2012-05-27 19:45:07', - u'id': 68, - u'payment_sum': u'2000.00', - u'status': u'pending', - u'withdrawal_type': u'webmoney' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Payments.getOne(68) - self.assertEqual(res[u'id'], 68) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Payments.SINGLE_URL, payment_id=167), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Payments.getOne(167) + + self.assertIn('status', result) + + +class PaymentsStatementTestCase(BaseTestCase): + + def test_get_payments_statement_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(PaymentsStatement.URL, payment_id=12, params={ + 'detailed': 1, + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsStatement.get(12, detailed=True) + + self.assertIn('status', result) class PaymentsManageTestCase(BaseTestCase): def test_create_payments_request(self): - self.set_mocker(PaymentsManage.CREATE_URL, - method='POST', with_pagination=False, code='EUR') - result = { - u'comment': u'', - u'currency': u'EUR', - u'datetime': u'2013-04-24 15:07:47', - u'id': 71, - u'payment_sum': u'10000', - u'status': u'draft', - u'withdrawal_type': u'' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.PaymentsManage.create('EUR') - self.assertEqual(res[u'status'], u'draft') - self.assertEqual(res[u'currency'], u'EUR') - self.assertEqual(res[u'id'], 71) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + 
self.prepare_url(PaymentsManage.CREATE_URL, code='USD'), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsManage.create('USD') + + self.assertIn('status', result) def test_confirm_payments_request(self): - self.set_mocker(PaymentsManage.CONFIRM_URL, - method='POST', with_pagination=False, id=71) - result = { - u'comment': u'', - u'currency': u'EUR', - u'datetime': u'2013-04-24 15:07:47', - u'id': 71, - u'payment_sum': u'10000.00', - u'status': u'pending', - u'withdrawal_type': u'webmoney' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.PaymentsManage.confirm(71) - self.assertEqual(res[u'status'], u'pending') - self.assertEqual(res[u'currency'], u'EUR') - self.assertEqual(res[u'id'], 71) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(PaymentsManage.CONFIRM_URL, payment_id=98), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsManage.confirm(98) + + self.assertIn('status', result) def test_delete_payments_request(self): - self.set_mocker(PaymentsManage.DELETE_URL, - method='POST', with_pagination=False, id=71) - result = { - u'message': u'Заявка удалена успешно.', - u'success': u'Deleted' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.PaymentsManage.delete(71) - self.assertIn('success', res) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(PaymentsManage.DELETE_URL, payment_id=98), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsManage.delete(98) + + self.assertIn('status', result) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_referrals.py b/pyadmitad/tests/test_referrals.py index b76a23b..b75b26c 100644 --- a/pyadmitad/tests/test_referrals.py +++ b/pyadmitad/tests/test_referrals.py @@ -1,49 +1,109 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest +from datetime import datetime + +import responses + from pyadmitad.items import Referrals +from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET from pyadmitad.tests.base import BaseTestCase class ReferralsTestCase(BaseTestCase): def test_get_referrals_request(self): - self.set_mocker(Referrals.URL, limit=1) - result = { - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'id': 8, - u'payment': None, - u'username': u'username' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Referrals.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Referrals.URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={ + '_meta': { + 'count': 2, + 'limit': 20, + 'offset': 0 + }, + 'results': [{ + 'id': 8, + 'payment': None, + 'username': 'username1' + }, { + 'id': 10, + 'payment': None, + 'username': 'username2' + }] + }, + status=200 + ) + + result = self.client.Referrals.get() + + self.assertIn('results', result) + self.assertIn('_meta', result) + self.assertIsInstance(result['results'], list) + self.assertIsInstance(result['_meta'], dict) + 
self.assertEqual(result['_meta']['limit'], 20) + + def test_get_referrals_with_filters_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Referrals.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.01.2020', + 'limit': 40, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={ + '_meta': { + 'count': 2, + 'limit': 40, + 'offset': 0 + }, + 'results': [{ + 'id': 8, + 'payment': None, + 'username': 'username1' + }, { + 'id': 10, + 'payment': None, + 'username': 'username2' + }] + }, + status=200 + ) + + result = self.client.Referrals.get(date_start=datetime(2010, 1, 1), date_end=datetime(2020, 1, 1), limit=40) + + self.assertIn('results', result) + self.assertIn('_meta', result) + self.assertIsInstance(result['results'], list) + self.assertIsInstance(result['_meta'], dict) + self.assertEqual(result['_meta']['limit'], 40) def test_get_referrals_request_with_id(self): - self.set_mocker(Referrals.SINGLE_URL, id=8, with_pagination=False) - result = { - u'id': 8, - u'payment': None, - u'username': u'username'} - self.mocker.result(result) - self.mocker.replay() - res = self.client.Referrals.getOne(8) - self.assertEqual(res[u'id'], 8) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Referrals.SINGLE_URL, referral_id=8), + match_querystring=True, + json={ + 'id': 8, + 'payment': None, + 'username': 'username1' + }, + status=200 + ) + + result = self.client.Referrals.getOne(8) + + self.assertEqual(result['id'], 8) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_retag.py b/pyadmitad/tests/test_retag.py new file mode 100644 index 0000000..7960a7e --- /dev/null +++ b/pyadmitad/tests/test_retag.py @@ -0,0 +1,130 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import Retag, RetagManager +from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from pyadmitad.tests.base import BaseTestCase + + +class RetagTestCase(BaseTestCase): + + def test_retag_get_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET, + 'website': 10, + 'active': 1 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.get(website=10, active=True) + + self.assertIn('status', result) + + def test_retag_get_single_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.SINGLE_URL, retag_id=11), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.getOne(11) + + self.assertIn('status', result) + + def test_retag_get_levels_for_campaign_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.LEVELS_FOR_CAMPAIGN_URL, campaign_id=20), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.getLevelsForCampaign(20) + + self.assertIn('status', result) + + def test_retag_get_levels_for_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.LEVELS_FOR_WEBSITE_URL, website_id=78), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.getLevelsForWebsite(78) + + self.assertIn('status', result) + + +class 
ManageRetagTestCase(BaseTestCase): + + def test_retag_create(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(RetagManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.RetagManager.create( + website=10, + level=2, + active=False, + script='print', + comment='some comment' + ) + + self.assertIn('status', result) + + def test_retag_update(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(RetagManager.UPDATE_URL, retag_id=50), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.RetagManager.update( + 50, + level=4, + active=True + ) + + self.assertIn('status', result) + + def test_retag_delete(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(RetagManager.DELETE_URL, retag_id=50), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.RetagManager.delete(50) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_statistics.py b/pyadmitad/tests/test_statistics.py index c7713e7..ec84353 100644 --- a/pyadmitad/tests/test_statistics.py +++ b/pyadmitad/tests/test_statistics.py @@ -1,406 +1,296 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest +import responses + from pyadmitad.items import StatisticWebsites, StatisticCampaigns,\ StatisticDays, StatisticMonths, StatisticActions, StatisticSubIds,\ StatisticSources, StatisticKeywords +from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET from pyadmitad.tests.base import BaseTestCase -class StatisticsWebsitesTestCase(BaseTestCase): - - def test_get_statistics_websites_request(self): - self.set_mocker( - StatisticWebsites.URL, - website=22, - allowed_filtering=StatisticWebsites.FILTERING, - allowed_ordering=StatisticWebsites.ORDERING - ) - result = { - u'results': [ - { - u'clicks': 184, - u'cr': 0.3, - u'ctr': 0.03, - u'currency': u'RUB', - u'ecpc': 124.77, - u'ecpm': 4403.26, - u'leads_sum': 61, - u'payment_sum_approved': 1870.67, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 21087.97, - u'sales_sum': 10, - u'views': 5214, - u'website_id': 22, - u'website_name': u'website' - } - ], - u'_meta': { - u'count': 1, - u'limit': 20, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticWebsites.get(website=22) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'results'][0][u'website_id'], 22) - self.mocker.verify() - - -class StatisticsCampaignTestCase(BaseTestCase): - - def test_get_statistics_campaign_request(self): - self.set_mocker( - StatisticCampaigns.URL, - campaign=9, - allowed_filtering=StatisticCampaigns.FILTERING, - allowed_ordering=StatisticCampaigns.ORDERING - ) - result = { - u'results': [ - { - u'advcampaign_id': 9, - u'advcampaign_name': u'Campaign', - u'clicks': 35, - u'cr': 0.1143, - u'ctr': 0.4487, - u'currency': u'RUB', - u'ecpc': 5.714286, - u'ecpm': 2564.102564, - u'leads_sum': 4, - u'payment_sum_approved': 0.0, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 200.0, - u'sales_sum': 0, - u'views': 78 - }, - ], - u'_meta': { - u'count': 1, - u'limit': 20, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticCampaigns.get(campaign=9) - 
self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'results'][0][u'advcampaign_id'], 9) - self.mocker.verify() - - -class StatisticsDaysTestCase(BaseTestCase): - - def test_get_statistics_days_request(self): - self.set_mocker( - StatisticDays.URL, - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticDays.FILTERING, - allowed_ordering=StatisticDays.ORDERING - ) - result = { - u'results': [ - { - u'clicks': 3, - u'cr': 0.3333, - u'ctr': 0.0, - u'currency': u'RUB', - u'date': u'2013-01-12', - u'ecpc': 27.88, - u'ecpm': 0.0, - u'leads_sum': 1, - u'payment_sum_approved': 83.65, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 0.0, - u'sales_sum': 0, - u'views': 0 - } - ], - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticDays.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsMonthsTestCase(BaseTestCase): - - def test_get_statistics_months_request(self): - self.set_mocker( - StatisticMonths.URL, - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticMonths.FILTERING, - allowed_ordering=StatisticMonths.ORDERING - ) - result = { - u'results': [ - { - u'clicks': 3, - u'cr': 0.3333, - u'ctr': 0.0, - u'currency': u'RUB', - u'date': u'2013-01-12', - u'ecpc': 27.88, - u'ecpm': 0.0, - u'leads_sum': 1, - u'payment_sum_approved': 83.65, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 0.0, - u'sales_sum': 0, - u'views': 0 - } - ], - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticMonths.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsActionsTestCase(BaseTestCase): - - def test_get_statistics_actions_request(self): - self.set_mocker( - StatisticActions.URL, - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticActions.FILTERING, - allowed_ordering=StatisticActions.ORDERING - ) - result = { - u'results': [ - { - u'action': u'action name', - u'action_date': u'2013-01-15 18:23:54', - u'action_id': 281, - u'advcampaign_id': 9, - u'advcampaign_name': u'Campaign', - u'cart': 777.0, - u'click_date': u'2011-01-13 18:23:50', - u'closing_date': u'2012-04-02', - u'status_updated': u'2011-09-16 23:13:35', - u'comment': None, - u'conversion_time': 4, - u'currency': u'RUB', - u'keyword': None, - u'payment': 50.0, - u'status': u'pending', - u'subid': None, - u'subid1': None, - u'subid2': None, - u'subid3': None, - u'subid4': None, - u'website_name': u'site1_of_webmaster1' - } - ], - u'_meta': { - u'count': 89, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticActions.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - 
self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsSubIdsTestCase(BaseTestCase): - - def test_get_statistics_sub_ids_request(self): - self.set_mocker( - StatisticSubIds.URL % '', - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticSubIds.FILTERING, - allowed_ordering=StatisticSubIds.ORDERING - ) - result = { - u'_meta': { - u'count': 1, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'clicks': 1, - u'cr': 89.0, - u'currency': u'RUB', - u'ecpc': 5202.5, - u'leads_sum': 89, - u'payment_sum_approved': 5002.5, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 200.0, - u'sales_sum': 0, - u'subid': u'sub' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticSubIds.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsSourcesTestCase(BaseTestCase): - - def test_get_statistics_sources_request(self): - self.set_mocker( - StatisticSources.URL, - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticSources.FILTERING, - allowed_ordering=StatisticSources.ORDERING - ) - result = { - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'clicks': 7, - u'cr': 0.1429, - u'currency': u'RUB', - u'ecpc': 51.785714, - u'leads_sum': 1, - u'payment_sum_approved': 0.0, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 362.5, - u'sales_sum': 0, - u'source': u'g', - u'source_name': u'Google Adwords' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticSources.get( - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsKeywordsTestCase(BaseTestCase): - - def test_get_statistics_keywords_request(self): - self.set_mocker( - StatisticKeywords.URL, - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticKeywords.FILTERING, - allowed_ordering=StatisticKeywords.ORDERING - ) - result = { - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'clicks': 2, - u'cr': 0.0, - u'currency': u'RUB', - u'ecpc': 0.0, - u'keyword': u'keyword', - u'leads_sum': 0, - u'payment_sum_approved': 0.0, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 0.0, - u'sales_sum': 0 - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticKeywords.get( - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() +class StatisticWebsitesTestCase(BaseTestCase): + + def test_get_statistic_websites_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticWebsites.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', 
+ 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticWebsites.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticCampaignTestCase(BaseTestCase): + + def test_get_statistic_campaign_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticCampaigns.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticCampaigns.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticDaysTestCase(BaseTestCase): + + def test_get_statistic_days_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticDays.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticDays.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticMonthsTestCase(BaseTestCase): + + def test_get_statistic_months_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticMonths.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticMonths.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticActionsTestCase(BaseTestCase): + + def test_get_statistic_actions_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticActions.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'closing_date_start': '01.01.2010', + 'closing_date_end': '01.02.2010', + 'status_updated_start': '01.01.2010 10:10:10', + 'status_updated_end': '01.02.2010 10:10:10', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'subid1': '1234567890987654321', + 'subid4': '1234567890987654321', + 'status': 1, + 'keyword': 'foo', + 'action': 'lead', + 'action_type': 'lead', + 'action_id': 27, + 'order_by': ['status'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = 
self.client.StatisticActions.get( + date_start='01.01.2010', + date_end='01.02.2010', + closing_date_start='01.01.2010', + closing_date_end='01.02.2010', + status_updated_start='01.01.2010 10:10:10', + status_updated_end='01.02.2010 10:10:10', + website=10, + campaign=20, + subid='1234567890987654321', + subid1='1234567890987654321', + subid4='1234567890987654321', + status=1, + keyword='foo', + action='lead', + action_type='lead', + action_id=27, + order_by=['status'] + ) + + self.assertIn('status', result) + + +class StatisticSubIdsTestCase(BaseTestCase): + + def test_get_statistic_sub_ids_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticSubIds.URL, subid_number='', params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid1': '123567', + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticSubIds.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid1='123567' + ) + + self.assertIn('status', result) + + +class StatisticSourcesTestCase(BaseTestCase): + + def test_get_statistic_sources_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticSources.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 22, + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticSources.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=22 + ) + + self.assertIn('status', result) + + +class StatisticKeywordsTestCase(BaseTestCase): + + def test_get_statistic_keywords_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticKeywords.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'source': 'g', + 'order_by': ['cr', 'ecpc'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticKeywords.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + source='g', + order_by=['cr', 'ecpc'] + ) + + self.assertIn('status', result) if __name__ == '__main__': diff --git a/pyadmitad/tests/test_tickets.py b/pyadmitad/tests/test_tickets.py new file mode 100644 index 0000000..333c188 --- /dev/null +++ b/pyadmitad/tests/test_tickets.py @@ -0,0 +1,89 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from pyadmitad.items import Tickets, TicketsManager +from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from pyadmitad.tests.base import BaseTestCase + + +class TicketsTestCase(BaseTestCase): + + def test_get_tickets_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Tickets.URL, params={ + 'status': 1, + 'date_start': '01.01.2010', + 'date_end': '01.01.2020', + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Tickets.get( + status=1, + date_start='01.01.2010', + date_end='01.01.2020' + ) + + 
self.assertIn('status', result) + + def test_get_single_ticket_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Tickets.SINGLE_URL, ticket_id=22), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Tickets.getOne(22) + + self.assertIn('status', result) + + +class ManageTicketsTestCase(BaseTestCase): + + def test_create(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(TicketsManager.CREATE_URL), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.TicketsManager.create( + subject='foo', + text='bar', + campaign=90, + category=20, + priority=2, + ) + + self.assertIn('status', result) + + def test_commenting(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(TicketsManager.COMMENT_URL, ticket_id=276), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.TicketsManager.comment(276, text='comment text') + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_transport.py b/pyadmitad/tests/test_transport.py new file mode 100644 index 0000000..a1cae3e --- /dev/null +++ b/pyadmitad/tests/test_transport.py @@ -0,0 +1,300 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +from datetime import datetime + +import responses + +from pyadmitad.transport import oauth_client_authorization, get_credentials, build_headers, \ + prepare_request_data, api_request, oauth_refresh_access_token, HttpTransport +from pyadmitad.constants import DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ + BASE_URL, TOKEN_URL +from pyadmitad.exceptions import HttpException +from pyadmitad.tests.base import BaseTestCase + + +class BaseTransportTestCase(BaseTestCase): + + def test_get_credentials(self): + self.assertEqual(get_credentials('foobarbaz', '123456789'), 'Zm9vYmFyYmF6OjEyMzQ1Njc4OQ==') + + def test_build_headers(self): + self.assertDictEqual(build_headers('foobarbaz', user_agent='test_bot'), { + 'Authorization': 'Bearer foobarbaz', + 'Connection': 'Keep-Alive', + 'User-Agent': 'test_bot', + }) + + def test_prepare_request_data(self): + data = prepare_request_data({'foo': 42}, None, 'GET', timeout=10) + + self.assertDictEqual(data, { + 'headers': {}, + 'timeout': 10, + 'verify': False, + 'allow_redirects': True, + 'params': {'foo': 42} + }) + + data = prepare_request_data({'foo': 42}, None, 'POST') + + self.assertDictEqual(data, { + 'headers': {}, + 'timeout': DEFAULT_REQUEST_TIMEOUT, + 'verify': False, + 'allow_redirects': True, + 'data': {'foo': 42} + }) + + data = prepare_request_data({'foo': [None, None, 11]}, None, 'GET') + + self.assertDictEqual(data, { + 'headers': {}, + 'timeout': DEFAULT_REQUEST_TIMEOUT, + 'verify': False, + 'allow_redirects': True, + 'params': {'foo': [11]} + }) + + def test_api_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + 'http://example.com/', + json={ + 'status': 'ok' + }, + status=200 + ) + + result = api_request('http://example.com/') + + self.assertIn('status', result) + self.assertEqual('ok', result['status']) + + def test_api_request_404(self): + with self.assertRaises(HttpException): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + 'http://example.com/', + json={}, + status=400 + ) + + api_request('http://example.com/') + + def test_api_request_get(self): + with responses.RequestsMock() as resp: + resp.add( + 
resp.GET, + 'http://example.com/?bar=1&baz=0', + match_querystring=True, + json={ + 'success': 'ok' + }, + status=200 + ) + + result = api_request('http://example.com/', data={ + 'foo': [None], + 'bar': 1, + 'baz': 0 + }) + + self.assertIn('success', result) + + def test_oauth_refresh_access_token(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + TOKEN_URL, + json={ + 'access_token': 'access_token', + 'expires_in': '604800', + 'refresh_token': 'refresh', + 'token_type': 'bearer', + 'username': 'username', + 'first_name': 'first_name', + 'last_name': 'second_name', + 'language': 'en', + }, + status=200 + ) + + result = oauth_refresh_access_token({ + 'client_id': 'client_id', + 'client_secret': 'secret', + 'refresh_token': 'r_token', + }) + + self.assertIn('access_token', result) + self.assertIn('expires_in', result) + self.assertIn('refresh_token', result) + self.assertIn('username', result) + self.assertIn('first_name', result) + self.assertIn('last_name', result) + self.assertIn('language', result) + + def test_oauth_client_authorization(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + TOKEN_URL, + json={ + 'access_token': 'access_token', + 'expires_in': '604800', + 'refresh_token': 'refresh', + 'token_type': 'bearer', + 'username': 'username', + 'first_name': 'first_name', + 'last_name': 'second_name', + 'language': 'en', + 'scope': 'pricate_data', + }, + status=200 + ) + + result = oauth_client_authorization({ + 'client_id': 'client_id', + 'client_secret': 'secret', + 'scopes': 'private_data', + }) + + self.assertIn('access_token', result) + self.assertIn('expires_in', result) + self.assertIn('refresh_token', result) + self.assertIn('username', result) + self.assertIn('first_name', result) + self.assertIn('last_name', result) + self.assertIn('language', result) + + +class TransportTestCase(BaseTestCase): + + def test_set_default_pagination(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_pagination() \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_pagination(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'limit': 120, + 'offset': 100 + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_pagination(limit=120, offset=100) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_ordering(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'order_by': 'name' + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_ordering(ordering={ + 'order_by': 'name', + 'available': ['name'] + }) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_multiple_ordering(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'order_by': ['name', '-date_updated'] + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_ordering(ordering={ + 'order_by': [None, 'name', '-date_updated'], + 
'available': ['name', 'date_updated'] + }) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_filtering(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'name': 'FOOBARBAZ', + 'foo': 42, + 'date_start': '01.01.2020', + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_filtering(filtering={ + 'filter_by': { + 'name': 'foobarbaz', + 'foo': 42, + 'date_start': datetime(2020, 1, 1), + 'some': 12, + }, + 'available': { + 'name': lambda x: x.upper(), + 'foo': lambda x: x, + 'date_start': lambda x: x.strftime('%d.%m.%Y'), + } + }) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/pyadmitad/tests/test_websites.py b/pyadmitad/tests/test_websites.py index 4480bed..2a2a54d 100644 --- a/pyadmitad/tests/test_websites.py +++ b/pyadmitad/tests/test_websites.py @@ -1,239 +1,267 @@ -# -*- coding: utf-8 -*- +# coding: utf-8 +from __future__ import unicode_literals import unittest +import responses + from pyadmitad.items import Websites, WebsitesManage from pyadmitad.tests.base import BaseTestCase -WEBSITE_CREATE_DATA = dict( - regions=['RU'], - atnd_hits='20', - atnd_visits='10', - name='website1', - language='ru', - site_url='http://google.com', - description='descriptiondescriptiondescriptiondescription' - 'descriptiondescriptiondescriptiondescription' - 'descriptiondescription', - categories=['1', '2'], - kind='website' -) - - class WebsitesTestCase(BaseTestCase): def test_get_websites_request(self): - self.set_mocker(Websites.URL, limit=1, offset=2) - result = { - u'results': [ - { - u'status': u'active', - u'kind': u'website', - u'is_old': True, - u'name': u'site', - u'language': 'ru', - u'description': u'site', - u'verification_code': u'59505879f5', - u'creation_date': u'2010-03-31 18:25:19', - u'regions': [ - { - u'region': u'RU', - u'id': 5 - } - ], - u'atnd_visits': 100, - u'adservice': None, - u'site_url': u'http://www.mail.ru/', - u'id': 22, - u'categories': [ - { - u'name': u'Категория', - u'parent': None, - u'id': 5 - } - ], - u'atnd_hits': 0 - } - ], - u'_meta': { - u'count': 4, - u'limit': 1, - u'offset': 2 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Websites.get(limit=1, offset=2) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Websites.URL, params={ + 'limit': 1, + 'offset': 2 + }), + match_querystring=True, + json={ + 'results': [{ + 'id': 4, + 'status': 'active', + 'kind': 'website', + 'name': 'FooName', + 'categories': [1, 2], + 'adservice': None, + 'creation_date': '2010-04-17T21:54:45', + 'description': '', + 'is_old': True, + 'mailing_targeting': False, + 'regions': ['RU'], + 'site_url': 'https://foo.bar/', + 'validation_passed': False, + 'verification_code': '11c0sd4d14', + 'atnd_hits': 122, + 'atnd_visits': 10, + }], + '_meta': { + 'limit': 1, + 'offset': 2, + 'count': 9, + } + }, + status=200 + ) + + result = self.client.Websites.get(limit=1, offset=2) + + self.assertEqual(len(result['results']), 1) + self.assertIn('count', result['_meta']) + for item in result['results']: + self.assertIn('id', item) + self.assertIn('kind', item) + 
self.assertIn('status', item) + self.assertIn('name', item) + self.assertIn('categories', item) + self.assertIn('adservice', item) + self.assertIn('creation_date', item) + self.assertIn('description', item) + self.assertIn('is_old', item) + self.assertIn('mailing_targeting', item) + self.assertIn('regions', item) + self.assertIn('site_url', item) + self.assertIn('validation_passed', item) + self.assertIn('verification_code', item) + self.assertIn('atnd_hits', item) + self.assertIn('atnd_visits', item) def test_get_websites_request_with_id(self): - self.set_mocker(Websites.SINGLE_URL, id=22, with_pagination=False) - result = { - u'status': u'active', - u'kind': u'website', - u'is_old': True, - u'name': u'site', - u'language': 'ru', - u'description': u'site', - u'verification_code': u'59505879f5', - u'creation_date': u'2010-03-31 18:25:19', - u'regions': [ - { - u'region': u'RU', - u'id': 5 - } - ], - u'atnd_visits': 100, - u'adservice': None, - u'site_url': u'http://www.mail.ru/', - u'id': 22, - u'categories': [ - { - u'name': u'Категория', - u'parent': None, - u'id': 5 - } - ], - u'atnd_hits': 0 - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Websites.getOne(22) - self.assertEqual(res[u'id'], 22) - self.mocker.verify() + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Websites.SINGLE_URL, website_id=4), + json={ + 'id': 4, + 'status': 'active', + 'kind': 'website', + 'name': 'FooName', + 'categories': [{ + 'id': 1, + 'language': 'en', + 'name': 'Cat1', + 'parent': None + }, { + 'id': 2, + 'language': 'en', + 'name': 'Cat2', + 'parent': None + }], + 'adservice': None, + 'creation_date': '2010-04-17T21:54:45', + 'description': '', + 'is_old': True, + 'mailing_targeting': False, + 'regions': ['RU'], + 'site_url': 'https://foo.bar/', + 'validation_passed': False, + 'verification_code': '11c0sd4d14', + 'atnd_hits': 122, + 'atnd_visits': 10, + }, + status=200 + ) + + result = self.client.Websites.getOne(4) + + self.assertIn('id', result) + self.assertIn('kind', result) + self.assertIn('status', result) + self.assertIn('name', result) + self.assertIn('categories', result) + self.assertIn('adservice', result) + self.assertIn('creation_date', result) + self.assertIn('description', result) + self.assertIn('is_old', result) + self.assertIn('mailing_targeting', result) + self.assertIn('regions', result) + self.assertIn('site_url', result) + self.assertIn('validation_passed', result) + self.assertIn('verification_code', result) + self.assertIn('atnd_hits', result) + self.assertIn('atnd_visits', result) class WebsitesManageTestCase(BaseTestCase): def test_create_website_request(self): - self.set_mocker( - WebsitesManage.CREATE_URL, - method='POST', - with_pagination=False, - data=WEBSITE_CREATE_DATA) - result = { - u'atnd_hits': 20, - u'atnd_visits': 10, - u'categories': [ - { - u'id': 1, - u'name': u'Магазин', - u'parent': None + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.CREATE_URL), + match_querystring=True, + json={ + 'id': 42, + 'status': 'new', + 'kind': 'website', + 'name': 'FooBar', + 'categories': [{ + 'id': 1, + 'language': 'en', + 'name': 'Cat1', + 'parent': None + }, { + 'id': 2, + 'language': 'en', + 'name': 'Cat2', + 'parent': None + }], + 'adservice': None, + 'creation_date': '2016-10-10T11:54:45', + 'description': 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. 
Lorem Ipsum has been the industry\'s standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.', + 'is_old': False, + 'mailing_targeting': True, + 'regions': ['RU'], + 'site_url': 'https://foobar.bar/', + 'validation_passed': False, + 'verification_code': '244a5d4a14', + 'atnd_hits': 500, + 'atnd_visits': 100, }, - { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - ], - u'creation_date': u'2013-04-22 14:41:29', - u'description': u'descriptiondescriptiondescriptiondescription' - u'descriptiondescriptiondescriptiondescription' - u'descriptiondescription', - u'id': 52, - u'is_old': False, - u'kind': u'website', - u'language': u'ru', - u'name': u'website1', - u'regions': [ - { - u'id': 25, - u'region': u'RU' - } - ], - u'site_url': u'http://google.com/', - u'status': u'new', - u'verification_code': u'fde88f4b6b' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.create(**WEBSITE_CREATE_DATA) - self.assertIn(u'id', res) - self.assertEqual(u'new', res['status']) - self.assertEqual(u'website', res['kind']) - self.mocker.verify() + status=200 + ) + + result = self.client.WebsitesManage.create( + name='FooBar', + kind='website', + language='en', + site_url='https://foobar.baz/', + description='Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry\'s standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.', + categories=[1, 2], + regions=['RU'], + atnd_visits=500, + atnd_hits=100, + mailing_targeting=True + ) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('status', result) + self.assertIn('kind', result) + self.assertIn('verification_code', result) def test_update_website_request(self): - self.set_mocker( - WebsitesManage.UPDATE_URL, - id=52, - method='POST', - with_pagination=False, - data={'language': 'de', 'name': 'test-update'}) - result = { - u'atnd_hits': 20, - u'atnd_visits': 10, - u'categories': [ - { - u'id': 1, - u'name': u'Магазин', - u'parent': None + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.UPDATE_URL, website_id=42), + json={ + 'id': 42, + 'status': 'new', + 'kind': 'website', + 'name': 'FooBarBaz', + 'categories': [{ + 'id': 1, + 'language': 'en', + 'name': 'Cat1', + 'parent': None + }, { + 'id': 2, + 'language': 'en', + 'name': 'Cat2', + 'parent': None + }], + 'adservice': None, + 'creation_date': '2016-10-10T11:54:45', + 'description': 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry\'s standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. 
It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.', + 'is_old': False, + 'mailing_targeting': True, + 'regions': ['RU'], + 'site_url': 'https://foobar.bar/', + 'validation_passed': False, + 'verification_code': '244a5d4a14', + 'atnd_hits': 1000, + 'atnd_visits': 100, }, - { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - ], - u'creation_date': u'2013-04-22 14:41:29', - u'description': u'descriptiondescriptiondescriptiondescription' - u'descriptiondescriptiondescriptiondescription' - u'descriptiondescription', - u'id': 52, - u'is_old': False, - u'kind': u'website', - u'language': u'de', - u'name': u'test-update', - u'regions': [ - { - u'id': 25, - u'region': u'RU' - } - ], - u'site_url': u'http://google.com/', - u'status': u'new', - u'verification_code': u'fde88f4b6b' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.update( - 52, language='de', name='test-update') - self.assertIn(u'id', res) - self.assertEqual(u'test-update', res['name']) - self.assertEqual(u'de', res['language']) - self.mocker.verify() + status=200 + ) + + result = self.client.WebsitesManage.update( + 42, + name='FooBarBaz', + atnd_visits=1000, + ) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('atnd_visits', result) def test_verify_website_request(self): - self.set_mocker( - WebsitesManage.VERIFY_URL, - id=52, method='POST', with_pagination=False) - result = { - "message": "Площадка прошла автоматическую проверку." - " Ожидайте подтверждения администрацией.", - "success": "Accepted" - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.verify(52) - self.assertIn(u'success', res) + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.VERIFY_URL, website_id=42), + json={ + 'message': 'Message', + 'success': 'Accepted' + }, + status=200 + ) + + result = self.client.WebsitesManage.verify(42) + + self.assertIn('message', result) + self.assertIn('success', result) def test_delete_website_request(self): - self.set_mocker( - WebsitesManage.DELETE_URL, - id=52, method='POST', with_pagination=False) - result = { - "message": "Площадка удалена успешно.", - "success": "Deleted" - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.delete(52) - self.assertIn(u'success', res) + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.DELETE_URL, website_id=42), + json={ + 'message': 'Message', + 'success': 'Deleted' + }, + status=200 + ) + + result = self.client.WebsitesManage.delete(42) + + self.assertIn('message', result) + self.assertIn('success', result) if __name__ == '__main__': diff --git a/pyadmitad/transport.py b/pyadmitad/transport.py index eef9f06..2b23ec9 100644 --- a/pyadmitad/transport.py +++ b/pyadmitad/transport.py @@ -1,16 +1,15 @@ -import requests -from base64 import b64encode +# coding: utf-8 +from __future__ import unicode_literals + import json -import urllib -try: - import urlparse -except ImportError: - import urllib.parse -import uuid import logging -from pyadmitad.constants import * -from pyadmitad.exceptions import * +from base64 import b64encode +import requests + +from pyadmitad.constants 
import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ + DEFAULT_REQUEST_TIMEOUT, MAX_PAGINATION_LIMIT, TOKEN_URL +from pyadmitad.exceptions import HttpException, ConnectionException, JsonException LOG = logging.getLogger(__file__) LOG.addHandler(logging.StreamHandler()) @@ -31,38 +30,65 @@ def debug_log(value, debug=True): LOG.setLevel(logging.NOTSET) -def prepare_request_data( - data=None, headers=None, method='GET', - timeout=None, ssl_verify=False): - if headers is None: - headers = {} - kwargs = {} - if timeout is None: - timeout = DEFAULT_REQUEST_TIMEOUT - kwargs['timeout'] = timeout +def get_credentials(client_id, client_secret): + return b64encode( + ('%s:%s' % (client_id, client_secret)).encode('utf-8') + ).decode('utf-8') + + +def build_headers(access_token, user_agent=None): + headers = { + 'Authorization': 'Bearer %s' % access_token, + 'Connection': 'Keep-Alive', + } + + if user_agent: + headers['User-Agent'] = user_agent + return headers + + +def prepare_data(data=None): + if data: + new_data = {} + for key, value in data.items(): + if isinstance(value, (list, tuple, set)): + new_data[key] = [item for item in value if item is not None] + else: + new_data[key] = value if value is not None else None + return new_data + return data + + +def prepare_request_data(data=None, headers=None, method='GET', + timeout=None, ssl_verify=False): + kwargs = { + 'headers': headers if headers is not None else {}, + 'timeout': timeout if timeout is not None else DEFAULT_REQUEST_TIMEOUT, + 'verify': ssl_verify, + 'allow_redirects': True, + } + + prepared_data = prepare_data(data) + if method == 'POST': - kwargs['data'] = data + kwargs['data'] = prepared_data if method == 'GET': - kwargs['params'] = data - kwargs['headers'] = headers - kwargs['allow_redirects'] = True - kwargs['verify'] = ssl_verify + kwargs['params'] = prepared_data + return kwargs -def api_request( - url, data=None, headers=None, method='GET', - files=None, timeout=None, ssl_verify=False, debug=False): - kwargs = prepare_request_data( - data=data, headers=headers, method=method, - timeout=timeout, ssl_verify=ssl_verify) +def api_request(url, data=None, headers=None, method='GET', + files=None, timeout=None, ssl_verify=True, debug=False): + kwargs = prepare_request_data(data=data, headers=headers, method=method, + timeout=timeout, ssl_verify=ssl_verify) status_code = 500 - content = u'' + content = '' try: response = requests.request(method, url, files=files, **kwargs) - debug_log(u'Request url: %s' % response.url, debug) + debug_log('Request url: %s' % response.url, debug) # if method == 'POST': - # debug_log(u'Request body: %s' % response.request.body, debug) + # debug_log('Request body: %s' % response.request.body, debug) status_code = response.status_code content = response.content if status_code >= 400: @@ -76,66 +102,6 @@ def api_request( return response.json() -def get_credentials(client_id, client_secret): - return b64encode( - ("%s:%s" % (client_id, client_secret)).encode('utf-8') - ).decode('utf-8') - - -def api_post_request(url, **kwargs): - kwargs['method'] = "POST" - return api_request(url, **kwargs) - - -def api_get_request(url, **kwargs): - kwargs['method'] = "GET" - return api_request(url, **kwargs) - - -def build_authorization_headers(access_token): - return {'Authorization': "Bearer %s" % access_token} - - -def build_headers(access_token, user_agent=None, language=None): - headers = build_authorization_headers(access_token) - headers['Connection'] = 'Keep-Alive' - if user_agent: - headers['User-Agent'] = 
user_agent - if language: - headers['Content-Language'] = language - return headers - - -def oauth_password_authorization(data): - """ - OAuth2 password authorization - Used to get an access_token with the user's password and username - The function parameter should be a dictionary with next structure: - data = { - 'client_id': '', - 'client_secret': '', - 'username': '', - 'password': '', - 'scope': '' - } - """ - client_id = data['client_id'] - client_secret = data['client_secret'] - params = { - 'grant_type': 'password', - 'client_id': client_id, - 'username': data['username'], - 'password': data['password'], - 'scope': data['scopes'] - } - credentials = get_credentials(client_id, client_secret) - headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': 'Basic %s' % credentials - } - return api_post_request(TOKEN_URL, data=params, headers=headers) - - def oauth_refresh_access_token(data): """ refresh an access token. Returns dictionary with new access_token. @@ -157,7 +123,7 @@ def oauth_refresh_access_token(data): 'refresh_token': refresh_token } headers = {'Content-Type': 'application/x-www-form-urlencoded'} - return api_post_request(TOKEN_URL, data=params, headers=headers) + return api_request(TOKEN_URL, method='POST', data=params, headers=headers) def oauth_client_authorization(data): @@ -183,220 +149,53 @@ def oauth_client_authorization(data): 'Content-Type': 'application/x-www-form-urlencoded', 'Authorization': 'Basic %s' % credentials } - return api_post_request(TOKEN_URL, data=params, headers=headers) - + return api_request(TOKEN_URL, method='POST', data=params, headers=headers) -class OAuthServerAuthorisation(object): - """ - OAuth2 server authorization. - Used to get an access_token with the web authentication - """ - - def __init__(self, data): - """ - The constructor parameter should be a dictionary with next structure: - data = { - 'client_secret': '', - 'client_id': '' - 'scopes': '', - 'redirect_uri': '', - } - """ - self.client_id = data['client_id'] - self.client_secret = data['client_secret'] - self.scopes = data['scopes'] - self.redirect_uri = data.get('redirect_uri') - self.language = data.get('language', DEFAULT_LANGUAGE) - self.state = None - - def get_authorize_url(self): - """ - Get an url that client should be redirected to pass - the authentication - """ - self.state = uuid.uuid4().get_hex() - params = { - 'client_id': self.client_id, - 'response_type': 'code', - 'state': self.state, - 'scope': self.scopes, - 'redirect_uri': self.redirect_uri - } - return "%s?%s" % (AUTHORIZE_URL, urllib.urlencode(params)) - - def get_access_token(self, url): - """ - Get access token request. - The URL parameter is a URL to which the client was redirected - after authentication - """ - url_params = dict(urlparse.parse_qsl(urlparse.urlparse(url).query)) - state = url_params.get('state') - if not state or state != self.state: - raise ApiException('Wrong or absent the state parameter.') - if 'error' in url_params: - raise ApiException(url_params['error']) - if 'code' not in url_params: - raise ApiException( - 'Invalid response. 
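The two token helpers kept by this patch both post form-encoded parameters to TOKEN_URL and now call api_request directly. A hedged sketch of how they might be combined, assuming the response dictionaries carry access_token/refresh_token as the docstrings suggest; the credentials are placeholders.

```python
from pyadmitad import transport

auth = transport.oauth_client_authorization({
    'client_id': 'your-client-id',
    'client_secret': 'your-client-secret',
    'scopes': 'public_data',
})
access_token = auth['access_token']

# If the token response also contains a refresh token, it can later be
# exchanged for a new access token (key names assumed from the params above).
if 'refresh_token' in auth:
    refreshed = transport.oauth_refresh_access_token({
        'client_id': 'your-client-id',
        'client_secret': 'your-client-secret',
        'refresh_token': auth['refresh_token'],
    })
    access_token = refreshed['access_token']
```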
The authorization code is absent.') - # go to get access token - params = { - 'grant_type': 'authorization_code', - 'client_id': self.client_id, - 'client_secret': self.client_secret, - 'code': url_params['code'], - 'redirect_uri': self.redirect_uri - } - headers = {'Content-Type': 'application/x-www-form-urlencoded'} - response = api_post_request(TOKEN_URL, data=params, headers=headers) - if 'access_token' not in response: - raise ApiException('Invalid response. The access_token is absent.') - return response +class HttpTransport(object): -class HttpTransportPagination(object): + SUPPORTED_METHODS = ('GET', 'POST', 'DELETE', 'PUT') - DEFAULT_LIMIT = 20 - DEFAULT_OFFSET = 0 + def __init__(self, access_token, user_agent=None, debug=False): + self._headers = build_headers(access_token, user_agent=user_agent) + self._method = 'GET' + self._files = None + self._data = None + self._url = None + self._debug = debug - def __init__(self, **kwargs): - self.offset = self._get_pagination_offset(**kwargs) - self.limit = self._get_pagination_limit(**kwargs) + def set_method(self, method): + if method in self.SUPPORTED_METHODS: + self._method = method + else: + raise AttributeError('This http method "%s" is not supported' % method) + # here we should clean data + return self.clean_data() - @staticmethod - def _check_pagination_value(value, maximum=None, minimum=None): - try: - value = int(value) - except (ValueError, TypeError): - return - if value < 0: - return - if maximum is not None and value > maximum: - return - if minimum is not None and value < minimum: - return - return value - - def _get_pagination_limit(self, **kwargs): - if 'limit' in kwargs: - limit = self._check_pagination_value( - kwargs['limit'], MAX_PAGINATION_LIMIT, 1) - if limit is not None: - return limit - return self.DEFAULT_LIMIT - - def _get_pagination_offset(self, **kwargs): - if 'offset' in kwargs: - offset = self._check_pagination_value(kwargs['offset']) - if offset is not None: - return offset - return self.DEFAULT_OFFSET - - def to_value(self): - return {'limit': self.limit, 'offset': self.offset} - - -class HttpTransportOrdering(object): - - ORDER_PARAMETER = 'order_by' - - def __init__(self, **kwargs): - allowed_ordering = kwargs.get('allowed_ordering', ()) - ordering = str(kwargs.get(self.ORDER_PARAMETER, '')) - suffix = '' - if ordering: - if ordering.startswith('-'): - suffix = '-' - ordering = ordering[1:] - if ordering not in allowed_ordering: - ordering = None - self.ordering = ordering - self.suffix = suffix - - def to_value(self): - if self.ordering: - return {self.ORDER_PARAMETER: '%s%s' % (self.suffix, self.ordering)} - return {} - - -class HttpTransportFiltering(object): - - def __init__(self, **kwargs): - self.result = {} - allowed_filtering = kwargs.get('allowed_filtering', {}) or {} - if not allowed_filtering: - return - self.allowed_filtering = allowed_filtering - self.check_filtering(**kwargs) - - def check_value(self, val, func): - """ - Should return False in boolean meaning - in case of unsupported or wrong value - """ - if not func: - return val - try: - return func(val) - except (TypeError, ValueError): - pass - - def check_values(self, values, func): - return filter(None, [self.check_value(value, func) for value in values]) - - def check_filtering(self, **filtering): - for val in self.allowed_filtering: - value = filtering.get(val) - if value is None: - continue - if not isinstance(value, (tuple, list)): - value = [value] - func = self.allowed_filtering[val] - res = self.check_values(value, func) - if 
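One behavioural detail of the new HttpTransport worth noting: set_method() finishes with clean_data(), so switching the HTTP verb discards anything accumulated so far, and an unsupported verb raises AttributeError. A short illustration (not from the patch; the token is a dummy and no request is made):

```python
from pyadmitad.transport import HttpTransport

transport = HttpTransport('dummy-access-token')

transport.set_data({'limit': 5})
transport.post()                # set_method('POST') ends with clean_data()
print(transport._data)          # None: the earlier data was discarded

# So the verb has to be chosen before pagination or filtering is applied:
transport.get().set_pagination(limit=5)
print(transport._data)          # e.g. {'limit': 5, 'offset': 0}
```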
res: - self.result.setdefault(val, []).extend(res) - - def to_value(self): - for key in self.result: - self.result[key] = list(set(self.result[key])) - return self.result + def get(self): + return self.set_method('GET') + def post(self): + return self.set_method('POST') -class HttpTransport(object): + def put(self): + return self.set_method('PUT') - SUPPORTED_METHODS = ('GET', 'POST', 'DELETE') - SUPPORTED_LANGUAGES = ('ru', 'en', 'de', 'pl') + def delete(self): + return self.set_method('DELETE') - def __init__(self, access_token, method=None, user_agent=None, debug=False): - self._headers = build_headers(access_token, user_agent=user_agent) - self._method = method or 'GET' - self._files = None - self._data = None - self._url = None - self._language = None + def set_debug(self, debug): self._debug = debug + return self def set_url(self, url, **kwargs): self._url = url % kwargs return self - def set_language(self, language): - if language in self.SUPPORTED_LANGUAGES: - self._language = language - self._headers['Content-Language'] = language - else: - raise AttributeError( - 'This language "%s" is not supported' % language) - return self - def set_data(self, data): self._data = data return self - def set_files(self, files): - self._files = files - return self - def clean_data(self): self._data = None return self @@ -407,45 +206,44 @@ def update_data(self, values): self._data.update(values) return self + def set_files(self, files): + self._files = files + return self + def set_pagination(self, **kwargs): - return self.update_data(HttpTransportPagination(**kwargs).to_value()) + limit = kwargs.get('limit', DEFAULT_PAGINATION_LIMIT) + offset = kwargs.get('offset', DEFAULT_PAGINATION_OFFSET) - def set_ordering(self, **kwargs): - return self.update_data(HttpTransportOrdering(**kwargs).to_value()) + data = { + 'limit': limit if 0 < limit <= MAX_PAGINATION_LIMIT else DEFAULT_PAGINATION_LIMIT, + 'offset': offset if offset > 0 else DEFAULT_PAGINATION_OFFSET, + } - def set_filtering(self, **kwargs): - return self.update_data(HttpTransportFiltering(**kwargs).to_value()) + return self.update_data(data) - def set_method(self, method): - if method in self.SUPPORTED_METHODS: - self._method = method - else: - raise AttributeError( - 'This http method "%s" is not supported' % method) - # here we should clean data - return self.clean_data() + def set_ordering(self, ordering): + order_by = ordering.get('order_by', []) + available = ordering.get('available', []) - def get(self): - return self.set_method('GET') + if not isinstance(order_by, (list, tuple, set)): + order_by = [order_by] - def post(self): - return self.set_method('POST') + data = { + 'order_by': [item for item in order_by if item is not None and + (item[1:] if item[0] == '-' else item) in available] + } - def set_debug(self, debug): - self._debug = debug - return self + return self.update_data(data) - @staticmethod - def _handle_response(response): - return response + def set_filtering(self, filtering): + filter_by = filtering.get('filter_by', {}) + available = filtering.get('available', {}) - @staticmethod - def api_request(url, **kwargs): - return api_request(url, **kwargs) + data = {key: available[key](value) for key, value in filter_by.items() if key in available} + + return self.update_data(data) def request(self, **kwargs): - if 'language' in kwargs: - self.set_language(kwargs['language']) if 'url' in kwargs: self.set_url(kwargs.pop('url'), **kwargs) if 'debug' in kwargs: @@ -453,7 +251,9 @@ def request(self, **kwargs): if not self._url: raise 
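The rewritten set_pagination/set_ordering/set_filtering helpers validate their input before merging it into the request data. A hedged, self-contained sketch of the accumulated result (the field names, sanitizers and token are invented for illustration; nothing is sent):

```python
from pyadmitad.transport import HttpTransport

transport = HttpTransport('dummy-access-token').get()

# Out-of-range values fall back to the defaults instead of reaching the API.
transport.set_pagination(limit=10 ** 6, offset=-1)

# Only fields listed under 'available' survive; a leading '-' still means descending.
transport.set_ordering({
    'order_by': ['-name', 'not_a_real_field'],
    'available': ['name', 'date_start'],
})

# Each allowed filter value is passed through its sanitizer callable.
transport.set_filtering({
    'filter_by': {'campaign': '10', 'unknown': 'dropped'},
    'available': {'campaign': int},
})

print(transport._data)  # {'limit': ..., 'offset': ..., 'order_by': ['-name'], 'campaign': 10}
```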
AttributeError( 'Absent url parameter. Use set_url method or pass ' - 'url parameter in this method.') + 'url parameter in this method.' + ) + requests_kwargs = { 'method': self._method, 'headers': self._headers, @@ -461,8 +261,18 @@ def request(self, **kwargs): 'debug': self._debug, 'files': self._files, } - response = self.api_request(self._url, **requests_kwargs) - return kwargs.get('handler', self._handle_response)(response) + response = HttpTransport.api_request(self._url, **requests_kwargs) + handler = kwargs.get('handler', self._handle_response) + + return handler(response) + + @staticmethod + def api_request(url, **kwargs): + return api_request(url, **kwargs) + + @staticmethod + def _handle_response(response): + return response def __call__(self, **kwargs): return self.request(**kwargs) diff --git a/setup.py b/setup.py index bdc9868..ba9a5f4 100644 --- a/setup.py +++ b/setup.py @@ -8,11 +8,11 @@ author_email='dev@admitad.com', description='A Python wrapper around the Admitad API', license='MIT', - url='https://github.com/admitad/admitad-python-api.git', - keywords='admitad', - install_requires=['requests', 'simplejson'], - test_suite='nose.collector', - tests_require=['nose', 'mocker'], + url='https://github.com/admitad/admitad-python-api', + keywords=['admitad'], + install_requires=['requests>=2.0', 'future'], + tests_require=['nose2', 'responses'], + test_suite='nose2.collector.collector', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', @@ -20,8 +20,7 @@ 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Communications', 'Topic :: Internet', - ], - dependency_links=[ - "git+https://github.com/trezorg/mocker.git#egg=mocker", + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.5', ], ) From 19b2df8afd9996f61cbeef29601bb7dd82bea2a7 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 3 Oct 2016 17:51:43 +0300 Subject: [PATCH 22/42] fix indent, rename param advcampaign -> campaign --- README.md | 379 ++++++++++++++-------------- pyadmitad/items/lost_orders.py | 4 +- pyadmitad/tests/test_lost_orders.py | 2 +- 3 files changed, 192 insertions(+), 193 deletions(-) diff --git a/README.md b/README.md index 044662d..989c8e3 100644 --- a/README.md +++ b/README.md @@ -37,75 +37,75 @@ API Items ### Me ### ```python - res = client.Me.get() +res = client.Me.get() ``` ### Balance ### ```python - res = client.Balance.get() - res = client.Balance.get(extended=True) +res = client.Balance.get() +res = client.Balance.get(extended=True) ``` ### PaymentsSettings ### ```python - res = client.PaymentsSettings.get() - res = client.PaymentsSettings.get(currency='USD') +res = client.PaymentsSettings.get() +res = client.PaymentsSettings.get(currency='USD') ``` ### Types of websites ### ```python - res = client.WebsiteTypes.get() - res = client.WebsiteTypes.get(limit=2, offset=1) +res = client.WebsiteTypes.get() +res = client.WebsiteTypes.get(limit=2, offset=1) ``` ### Regions of websites ### ```python - res = client.WebsiteRegions.get() - res = client.WebsiteRegions.get(limit=2, offset=1) +res = client.WebsiteRegions.get() +res = client.WebsiteRegions.get(limit=2, offset=1) ``` ### Languages ### ```python - res = client.SystemLanguages.get() - res = client.SystemLanguages.get(limit=2, offset=1) - res = client.SystemLanguages.getOne(code='ru') +res = client.SystemLanguages.get() +res = client.SystemLanguages.get(limit=2, offset=1) +res = client.SystemLanguages.getOne(code='ru') ``` ### Currencies ### ```python - res = 
client.SystemCurrencies.get() - res = client.SystemCurrencies.get(limit=2, offset=1) +res = client.SystemCurrencies.get() +res = client.SystemCurrencies.get(limit=2, offset=1) ``` ### Advertising services ### ```python - res = client.AdvertiserServices.get() - res = client.AdvertiserServices.get(limit=2, offset=1) - res = client.AdvertiserServices.getOne(1) - res = client.AdvertiserServices.getForKind(kind='website') - res = client.AdvertiserServices.getForKindOne(2, kind='website') +res = client.AdvertiserServices.get() +res = client.AdvertiserServices.get(limit=2, offset=1) +res = client.AdvertiserServices.getOne(1) +res = client.AdvertiserServices.getForKind(kind='website') +res = client.AdvertiserServices.getForKindOne(2, kind='website') ``` ### Categories of advertising campaigns ### ```python - res = client.CampaignCategories.get() - res = client.CampaignCategories.get(campaign=10, language='en') - res = client.CampaignCategories.get(limit=2, offset=1) - res = client.CampaignCategories.getOne(2) +res = client.CampaignCategories.get() +res = client.CampaignCategories.get(campaign=10, language='en') +res = client.CampaignCategories.get(limit=2, offset=1) +res = client.CampaignCategories.getOne(2) ``` ### Coupons ## @@ -113,30 +113,30 @@ API Items ###### List of coupons ###### ```python - res = client.Coupons.get() - res = client.Coupons.get(order_by=['date_start', '-name']) - res = client.Coupons.get(order_by='-date_end') - res = client.Coupons.get(campaign=1, category=2) - res = client.Coupons.get(campaign=[1, 2], category=[2, 3]) - res = client.Coupons.getOne(2) +res = client.Coupons.get() +res = client.Coupons.get(order_by=['date_start', '-name']) +res = client.Coupons.get(order_by='-date_end') +res = client.Coupons.get(campaign=1, category=2) +res = client.Coupons.get(campaign=[1, 2], category=[2, 3]) +res = client.Coupons.getOne(2) ``` ###### List of coupons for a website ###### ```python - res = client.CouponsForWebsite.get(2) - res = client.CouponsForWebsite.get(2, order_by=date_start) - res = client.CouponsForWebsite.get(2, campaign=1, category=2) - res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) - res = client.CouponsForWebsite.getOne(2, 1) +res = client.CouponsForWebsite.get(2) +res = client.CouponsForWebsite.get(2, order_by=date_start) +res = client.CouponsForWebsite.get(2, campaign=1, category=2) +res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) +res = client.CouponsForWebsite.getOne(2, 1) ``` ###### List of coupons categories ###### ```python - res = client.CouponsCategories.get() - res = client.CouponsCategories.get(limit=10, offset=10) - res = client.CouponsCategories.getOne(2) +res = client.CouponsCategories.get() +res = client.CouponsCategories.get(limit=10, offset=10) +res = client.CouponsCategories.getOne(2) ``` ### Websites ### @@ -144,9 +144,9 @@ API Items ##### List of websites ##### ```python - res = client.Websites.get(limit=10) - res = client.Websites.get(status='new', campaign_status='active') - res = client.Websites.getOne(2) +res = client.Websites.get(limit=10) +res = client.Websites.get(status='new', campaign_status='active') +res = client.Websites.getOne(2) ``` ##### Manage websites ##### @@ -154,37 +154,37 @@ API Items ###### Create website ###### ```python - res = client.WebsitesManage.create( - name='website1', - kind='website', - language='ru', - adservice=2, - site_url='http://site.com', - description='description', - categories=[1, 2], - regions=['RU'], - atnd_hits=20, - atnd_visits=10, - mailing_targeting=False - 
) +res = client.WebsitesManage.create( + name='website1', + kind='website', + language='ru', + adservice=2, + site_url='http://site.com', + description='description', + categories=[1, 2], + regions=['RU'], + atnd_hits=20, + atnd_visits=10, + mailing_targeting=False +) ``` ###### Update website ###### ```python - res = client.WebsitesManage.update(50, name='test', language='de') +res = client.WebsitesManage.update(50, name='test', language='de') ``` ###### Verify website ###### ```python - res = client.WebsitesManage.verify(50) +res = client.WebsitesManage.verify(50) ``` ###### Delete website ###### ```python - res = client.WebsitesManage.delete(50) +res = client.WebsitesManage.delete(50) ``` @@ -193,89 +193,89 @@ API Items ###### Statistics by websites ###### ```python - res = client.StatisticWebsites.get(website=1, campaign=1) - res = client.StatisticWebsites.get(subid="ADS778") - res = client.StatisticWebsites.get(limit=2) - res = client.StatisticWebsites.get(date_start='01.01.2013') +res = client.StatisticWebsites.get(website=1, campaign=1) +res = client.StatisticWebsites.get(subid="ADS778") +res = client.StatisticWebsites.get(limit=2) +res = client.StatisticWebsites.get(date_start='01.01.2013') ``` ###### Statistics by campaigns ###### ```python - res = client.StatisticCampaigns.get() - res = client.StatisticCampaigns.get(website=1, campaign=1) - res = client.StatisticCampaigns.get(subid="ADS778") - res = client.StatisticCampaigns.get(limit=2) - res = client.StatisticCampaigns.get(date_start='01.01.2013') +res = client.StatisticCampaigns.get() +res = client.StatisticCampaigns.get(website=1, campaign=1) +res = client.StatisticCampaigns.get(subid="ADS778") +res = client.StatisticCampaigns.get(limit=2) +res = client.StatisticCampaigns.get(date_start='01.01.2013') ``` ###### Statistics by days ###### ```python - res = client.StatisticDays.get() - res = client.StatisticDays.get(website=1, campaign=1) - res = client.StatisticDays.get(subid="ADS778") - res = client.StatisticDays.get(limit=2) - res = client.StatisticDays.get(date_start='01.01.2013') +res = client.StatisticDays.get() +res = client.StatisticDays.get(website=1, campaign=1) +res = client.StatisticDays.get(subid="ADS778") +res = client.StatisticDays.get(limit=2) +res = client.StatisticDays.get(date_start='01.01.2013') ``` ###### Statistics by months ###### ```python - res = client.StatisticMonths.get() - res = client.StatisticMonths.get(website=1, campaign=1) - res = client.StatisticMonths.get(subid="ADS778") - res = client.StatisticMonths.get(limit=2) - res = client.StatisticMonths.get(date_start='01.01.2013') +res = client.StatisticMonths.get() +res = client.StatisticMonths.get(website=1, campaign=1) +res = client.StatisticMonths.get(subid="ADS778") +res = client.StatisticMonths.get(limit=2) +res = client.StatisticMonths.get(date_start='01.01.2013') ``` ###### Statistics by actions ###### ```python - res = client.StatisticActions.get() - res = client.StatisticActions.get(date_start='01.01.2013') - res = client.StatisticActions.get(website=1, campaign=1) - res = client.StatisticActions.get(subid="ADS778") - res = client.StatisticActions.get(subid2="ADS778") - res = client.StatisticActions.get(limit=2) +res = client.StatisticActions.get() +res = client.StatisticActions.get(date_start='01.01.2013') +res = client.StatisticActions.get(website=1, campaign=1) +res = client.StatisticActions.get(subid="ADS778") +res = client.StatisticActions.get(subid2="ADS778") +res = client.StatisticActions.get(limit=2) ``` ###### Statistics by sub-ids 
###### ```python - res = client.StatisticSubIds.get() - res = client.StatisticSubIds.get(date_start='01.01.2013') - res = client.StatisticSubIds.get(subid="ADS778") - res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) - res = client.StatisticSubIds.get(limit=2) +res = client.StatisticSubIds.get() +res = client.StatisticSubIds.get(date_start='01.01.2013') +res = client.StatisticSubIds.get(subid="ADS778") +res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) +res = client.StatisticSubIds.get(limit=2) ``` ###### Statistics by sources ###### ```python - res = client.StatisticSources.get() - res = client.StatisticSources.get(date_start='01.01.2013') - res = client.StatisticSources.get(limit=2) +res = client.StatisticSources.get() +res = client.StatisticSources.get(date_start='01.01.2013') +res = client.StatisticSources.get(limit=2) ``` ###### Statistics by keywords ###### ```python - res = client.StatisticKeywords.get() - res = client.StatisticKeywords.get(date_start='01.01.2013') - res = client.StatisticKeywords.get(limit=2) +res = client.StatisticKeywords.get() +res = client.StatisticKeywords.get(date_start='01.01.2013') +res = client.StatisticKeywords.get(limit=2) ``` ### Referrals ### ```python - res = client.Referrals.get() - res = client.Referrals.get(limit=2) - res = client.Referrals.getOne(2) +res = client.Referrals.get() +res = client.Referrals.get(limit=2) +res = client.Referrals.getOne(2) ``` @@ -284,17 +284,16 @@ API Items ###### List of banners ###### ```python - res = client.Banners.get(2) - res = client.Banners.get(2, mobile_content=False, limit=2) +res = client.Banners.get(2) +res = client.Banners.get(2, mobile_content=False, limit=2) ``` - ###### List of banners for a website ###### ```python - res = client.BannersForWebsite.get(_id=2, w_id=3) - res = client.BannersForWebsite.get(2, 3) - res = client.BannersForWebsite.get(2, 3, uri_scheme='https', limit=5) +res = client.BannersForWebsite.get(_id=2, w_id=3) +res = client.BannersForWebsite.get(2, 3) +res = client.BannersForWebsite.get(2, 3, uri_scheme='https', limit=5) ``` @@ -303,26 +302,26 @@ API Items ###### List of campaigns ###### ```python - res = client.Campaigns.get() - res = client.Campaigns.get(limit=2) - res = client.Campaigns.getOne(2) +res = client.Campaigns.get() +res = client.Campaigns.get(limit=2) +res = client.Campaigns.getOne(2) ``` ###### List of campaigns for a website ###### ```python - res = client.CampaignsForWebsite.get(22) - res = client.CampaignsForWebsite.get(limit=2) - res = client.CampaignsForWebsite.getOne(6, 22) +res = client.CampaignsForWebsite.get(22) +res = client.CampaignsForWebsite.get(limit=2) +res = client.CampaignsForWebsite.getOne(6, 22) ``` ###### Manage campaigns ###### ```python - res = client.CampaignsManage.connect(6, 22) - res = client.CampaignsManage.connect(c_id=6, w_id=22) - res = client.CampaignsManage.disconnect(6, 22) - res = client.CampaignsManage.disconnect(c_id=6, w_id=22) +res = client.CampaignsManage.connect(6, 22) +res = client.CampaignsManage.connect(c_id=6, w_id=22) +res = client.CampaignsManage.disconnect(6, 22) +res = client.CampaignsManage.disconnect(c_id=6, w_id=22) ``` @@ -332,24 +331,24 @@ API Items ###### List of payment ###### ```python - res = client.Payments.get() - res = client.Payments.get(limit=2, has_statement=True) - res = client.Payments.getOne(2) +res = client.Payments.get() +res = client.Payments.get(limit=2, has_statement=True) +res = client.Payments.getOne(2) ``` ###### Payments statement ###### ```python - res = 
client.PaymentsStatement.get(12) - res = client.PaymentsStatement.get(12, detailed=True) +res = client.PaymentsStatement.get(12) +res = client.PaymentsStatement.get(12, detailed=True) ``` ###### Manage payments ###### ```python - res = client.PaymentsManage.create('USD') - res = client.PaymentsManage.confirm(71) - res = client.PaymentsManage.delete(71) +res = client.PaymentsManage.create('USD') +res = client.PaymentsManage.confirm(71) +res = client.PaymentsManage.delete(71) ``` ### Broken links ### @@ -357,16 +356,16 @@ API Items ###### List of broken links ###### ```python - res = client.BrokenLinks.get() - res = client.BrokenLinks.get(website=[10, 20], date_start='01.01.2010') - res = client.BrokenLinks.getOne(10) +res = client.BrokenLinks.get() +res = client.BrokenLinks.get(website=[10, 20], date_start='01.01.2010') +res = client.BrokenLinks.getOne(10) ``` ###### Manage broken links ###### ```python - res = client.ManageBrokenLinks.resolve(10) - res = client.ManageBrokenLinks.resolve([10, 11, 12]) +res = client.ManageBrokenLinks.resolve(10) +res = client.ManageBrokenLinks.resolve([10, 11, 12]) ``` ### Announcements ### @@ -374,8 +373,8 @@ API Items ###### List of annouuncements ###### ```python - res = client.Announcements.get() - res = client.Announcements.getOne(10) +res = client.Announcements.get() +res = client.Announcements.getOne(10) ``` ### News ### @@ -383,9 +382,9 @@ API Items ###### List of news ###### ```python - res = client.News.get() - res = client.News.get(limit=10, offset=20) - res = client.News.getOne(10) +res = client.News.get() +res = client.News.get(limit=10, offset=20) +res = client.News.getOne(10) ``` ### Links validator ### @@ -393,7 +392,7 @@ API Items ###### Validate link ###### ```python - res = client.LinksValidator.get('https://admitad.com/some_url/') +res = client.LinksValidator.get('https://admitad.com/some_url/') ``` ### Landings ### @@ -401,15 +400,15 @@ API Items ###### List of landings ###### ```python - res = client.Landings.get(10) - res = client.Landings.get(10, limit=100) +res = client.Landings.get(10) +res = client.Landings.get(10, limit=100) ``` ###### List of landings for website ###### ```python - res = client.LandingsForWebsite.get(10, 22) - res = client.LandingsForWebsite.get(10, 22, limit=100) +res = client.LandingsForWebsite.get(10, 22) +res = client.LandingsForWebsite.get(10, 22, limit=100) ``` ### Deeplinks ### @@ -417,7 +416,7 @@ API Items ###### Create deeplink ###### ```python - res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') +res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') ``` ### Referrals ### @@ -425,9 +424,9 @@ API Items ###### List of referrals ###### ```python - res = client.Referrals.get() - res = client.Referrals.get(date_start='01.01.2010', date_end=datetime.today()) - res = client.Referrals.getOne(181) +res = client.Referrals.get() +res = client.Referrals.get(date_start='01.01.2010', date_end=datetime.today()) +res = client.Referrals.getOne(181) ``` ### Optcodes ### @@ -435,34 +434,34 @@ API Items ###### List of opt-codes ###### ```python - res = client.OptCodes.get() - res = client.OptCodes.get(campaign=100, order_by=['method', 'desc_mode') - res = client.OptCodes.getOne(11) +res = client.OptCodes.get() +res = client.OptCodes.get(campaign=100, order_by=['method', 'desc_mode') +res = client.OptCodes.getOne(11) ``` ###### Offer status opt-codes manager ###### ```python - res = client.OfferStatusOptCodesManager.create( - website=10, 
campaign=100, desc_mode=0, method=l, - url='https://admitad.com/foobarbaz/' - ) - res = client.OfferStatusOptCodesManager.update( - desc_mode=1, method=1 - ) +res = client.OfferStatusOptCodesManager.create( + website=10, campaign=100, desc_mode=0, method=l, + url='https://admitad.com/foobarbaz/' +) +res = client.OfferStatusOptCodesManager.update( + desc_mode=1, method=1 +) ``` ###### Action opt-codes manager ###### ```python - res = client.ActionOptCodesManager.create( - website=10, campaign=100, desc_mode=0, method=l, - url='https://admitad.com/foobarbaz/', - action_type=0, status=1 - ) - res = client.ActionOptCodesManager.update( - desc_mode=1, method=1, action_type=1, status=2 - ) +res = client.ActionOptCodesManager.create( + website=10, campaign=100, desc_mode=0, method=l, + url='https://admitad.com/foobarbaz/', + action_type=0, status=1 +) +res = client.ActionOptCodesManager.update( + desc_mode=1, method=1, action_type=1, status=2 +) ``` ### Lost orders ### @@ -470,21 +469,21 @@ API Items ###### List of lost orders ###### ```python - res = client.LostOrders.get() - res = client.LostOrders.get(limit=20, offset=0) - res = client.LostOrders.getOne(76) +res = client.LostOrders.get() +res = client.LostOrders.get(limit=20, offset=0) +res = client.LostOrders.getOne(76) ``` ###### Lost orders manager ###### ```python - res = client.LostOrdersManager.create( - attachments=['/home/user/f.png', '/home/user/s.png'], - advcampaign=100, website=10, - order_id='039NRUHFJEW', order_date='12.08.2016', order_price=345.77, - comment='some comment' - ) - res = client.LostOrdersManager.delete(77) +res = client.LostOrdersManager.create( + attachments=['/home/user/f.png', '/home/user/s.png'], + campaign=100, website=10, + order_id='039NRUHFJEW', order_date='12.08.2016', order_price=345.77, + comment='some comment' +) +res = client.LostOrdersManager.delete(77) ``` ### Arecords ### @@ -492,9 +491,9 @@ API Items ###### List of arecords ###### ```python - res = client.Arecords.get() - res = client.Arecords.get(limit=50) - res = client.Arecords.getForWebsite(10) +res = client.Arecords.get() +res = client.Arecords.get(limit=50) +res = client.Arecords.getForWebsite(10) ``` ### Retag ### @@ -502,22 +501,22 @@ API Items ###### List of retag ###### ```python - res = client.Retag.get() - res = client.Retag.get(website=10, active=False, limit=50) - res = client.Retag.getOne(54) - res = client.Retag.getLevelsForWebsite(10) - res = client.Retag.getLevelsForCampaign(100) +res = client.Retag.get() +res = client.Retag.get(website=10, active=False, limit=50) +res = client.Retag.getOne(54) +res = client.Retag.getLevelsForWebsite(10) +res = client.Retag.getLevelsForCampaign(100) ``` ###### Retag manager ###### ```python - res = client.RetagManager.create( - website=10, level=22, active=False, - script='some js script', comment='some comment' - ) - res = client.RetagManager.update(16, level=10, active=True) - res = client.RetagManager.delete(88) +res = client.RetagManager.create( + website=10, level=22, active=False, + script='some js script', comment='some comment' +) +res = client.RetagManager.update(16, level=10, active=True) +res = client.RetagManager.delete(88) ``` ### Tickets ### @@ -525,19 +524,19 @@ API Items ###### List of tickets ###### ```python - res = client.Tickets.get() - res = client.Tickets.get(date_start='01.01.2016', status=0) - res = client.Tickets.getOne(50) +res = client.Tickets.get() +res = client.Tickets.get(date_start='01.01.2016', status=0) +res = client.Tickets.getOne(50) ``` ###### Ticket manager 
###### ```python - res = client.TicketsManager.create( - subject='subject', text='some text', - campaign=100, category=27, priority=0, - ) - res = client.TicketsManager.comment(12, text='some comment') +res = client.TicketsManager.create( + subject='subject', text='some text', + campaign=100, category=27, priority=0, +) +res = client.TicketsManager.comment(12, text='some comment') ``` Notes @@ -547,6 +546,6 @@ It is possible to override the default response handler by passing handler as a keyword argument to a client function call. For example: ```python - func = lambda x: (x, x) - result = client.Me.get(handler=func) +func = lambda x: (x, x) +result = client.Me.get(handler=func) ``` diff --git a/pyadmitad/items/lost_orders.py b/pyadmitad/items/lost_orders.py index 6cc70fe..2618ef6 100644 --- a/pyadmitad/items/lost_orders.py +++ b/pyadmitad/items/lost_orders.py @@ -48,7 +48,7 @@ class LostOrdersManager(Item): CREATE_URL = Item.prepare_url('lost_orders/create') CREATE_FIELDS = { - 'advcampaign': lambda x: Item.sanitize_integer_value(x, 'advcampaign'), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign'), 'website': lambda x: Item.sanitize_integer_value(x, 'website'), 'order_id': lambda x: Item.sanitize_string_value(x, 'order_id'), 'order_date': lambda x: Item.sanitize_date(x, 'order_date'), @@ -73,7 +73,7 @@ def create(self, attachments, **kwargs): """ Args: attachments (list of str) - advcampaign (int) + campaign (int) website (int) order_id (str) order_date (date) diff --git a/pyadmitad/tests/test_lost_orders.py b/pyadmitad/tests/test_lost_orders.py index b728342..2714f32 100644 --- a/pyadmitad/tests/test_lost_orders.py +++ b/pyadmitad/tests/test_lost_orders.py @@ -57,7 +57,7 @@ def test_create_lost_order(self): result = self.client.LostOrdersManager.create( attachments=['./pyadmitad/tests/data/image.png'], website=10, - advcampaign=20, + campaign=20, order_id='asd3f3', order_date='01.01.2010', order_price=1200, From 0a95849e4f2c8bfc49b247cc7b77862bd00de1cf Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 3 Oct 2016 17:56:32 +0300 Subject: [PATCH 23/42] fix README example --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 989c8e3..c66bcdc 100644 --- a/README.md +++ b/README.md @@ -11,11 +11,11 @@ Install Example ------- ```python -from pyadmitad import api +from pyadmitad import api, items client_id = "[client_id]" client_secret = "[client_secret]" -scope = ' '.join(set([client.Me.SCOPE])) +scope = ' '.join(set([items.Me.SCOPE])) client = api.get_oauth_client_client( client_id, From f7f0eda6343d0fa1c3ad2d420c600c548c43e6de Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Tue, 4 Oct 2016 12:34:52 +0300 Subject: [PATCH 24/42] update setup.py --- setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/setup.py b/setup.py index ba9a5f4..37a0235 100644 --- a/setup.py +++ b/setup.py @@ -9,6 +9,7 @@ description='A Python wrapper around the Admitad API', license='MIT', url='https://github.com/admitad/admitad-python-api', + download_url='https://github.com/admitad/admitad-python-api/tarball/1.0', keywords=['admitad'], install_requires=['requests>=2.0', 'future'], tests_require=['nose2', 'responses'], From 8625389f25f4bc8504767e241b9798ca65f7e5dc Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Thu, 13 Oct 2016 13:46:20 +0300 Subject: [PATCH 25/42] improve resources --- pyadmitad/items/announcements.py | 22 ++++++++++++++++++++-- pyadmitad/items/deeplinks.py | 4 ++-- pyadmitad/items/news.py | 27 
+++++++++++++++++++++++---- pyadmitad/items/websites.py | 15 +++------------ 4 files changed, 48 insertions(+), 20 deletions(-) diff --git a/pyadmitad/items/announcements.py b/pyadmitad/items/announcements.py index 405479a..ae80c89 100644 --- a/pyadmitad/items/announcements.py +++ b/pyadmitad/items/announcements.py @@ -25,19 +25,37 @@ def get(self, **kwargs): Args: limit (int) offset (int) + language (str) """ - return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) def getOne(self, _id, **kwargs): """ Args: _id (int) + language (str) """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + request_data = { 'url': self.SINGLE_URL, 'announcement_id': Item.sanitize_id(_id) } - return self.transport.get().request(**request_data) + return self.transport.get().set_filtering(filtering).request(**request_data) diff --git a/pyadmitad/items/deeplinks.py b/pyadmitad/items/deeplinks.py index 0703593..3ee9d87 100644 --- a/pyadmitad/items/deeplinks.py +++ b/pyadmitad/items/deeplinks.py @@ -16,7 +16,7 @@ class DeeplinksManage(Item): CREATE_URL = Item.prepare_url('deeplink/%(website_id)s/advcampaign/%(campaign_id)s') CREATE_FIELDS = { - 'ulp': lambda x: Item.sanitize_string_value(x, 'ulp'), + 'ulp': lambda x: Item.sanitize_string_array(x, 'ulp'), 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30), # todo: subid[1-4] } @@ -26,7 +26,7 @@ def create(self, website_id, campaign_id, **kwargs): Args: website_id (int) campaign_id (int) - ulp (str) + ulp (list of str) subid (str) """ diff --git a/pyadmitad/items/news.py b/pyadmitad/items/news.py index 55eb334..57e68df 100644 --- a/pyadmitad/items/news.py +++ b/pyadmitad/items/news.py @@ -25,19 +25,38 @@ def get(self, **kwargs): Args: limit (int) offset (int) + language (str) """ - return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) - def getOne(self, news_id): + def getOne(self, news_id, **kwargs): """ Args: news_id (int) + language (str) """ - data = { + request_data = { 'url': self.SINGLE_URL, 'news_id': self.sanitize_id(news_id) } - return self.transport.get().request(**data) + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + return self.transport.get().set_filtering(filtering).request(**request_data) diff --git a/pyadmitad/items/websites.py b/pyadmitad/items/websites.py index c675d7e..60fc2e5 100644 --- a/pyadmitad/items/websites.py +++ b/pyadmitad/items/websites.py @@ -107,10 +107,6 @@ class WebsitesManage(Item): x, 'categories'), 'regions': lambda x: Item.sanitize_string_array( x, 'regions', max_length=2), - 'atnd_visits': lambda x: Item.sanitize_integer_value( - x, 'atnd_visits', blank=False), - 'atnd_hits': lambda x: Item.sanitize_integer_value( - x, 'atnd_hits', blank=False), 'mailing_targeting': lambda x: Item.sanitize_bool_integer_value( x, 'mailing_targeting', blank=True) } @@ -118,6 +114,8 
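The resource changes in this patch add an optional language filter to announcements and news and let deeplinks take several target URLs at once. A hedged usage sketch, assuming an already authorised client as in the README example; the IDs and URLs are placeholders:

```python
# Optional two-letter language filter on both list and single-object calls.
news = client.News.get(language='en', limit=10)
announcement = client.Announcements.getOne(10, language='de')

# `ulp` is now sanitised as a list of strings, so several deeplinks can be
# generated in one call.
links = client.DeeplinksManage.create(
    22, 10,
    ulp=['https://example.com/page-one', 'https://example.com/page-two'],
    subid='AS32djkd31',
)
```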
@@ class WebsitesManage(Item): UPDATE_FIELDS = { 'name': lambda x: Item.sanitize_string_value( x, 'name', max_length=200, blank=True), + 'kind': lambda x: Item.sanitize_string_value( + x, 'kind', max_length=20, blank=True), 'language': lambda x: Item.sanitize_string_value( x, 'language', max_length=2, blank=True), 'adservice': lambda x: Item.sanitize_integer_value( @@ -130,10 +128,6 @@ class WebsitesManage(Item): x, 'categories', blank=True), 'regions': lambda x: Item.sanitize_string_array( x, 'regions', max_length=2, blank=True), - 'atnd_visits': lambda x: Item.sanitize_integer_value( - x, 'atnd_visits', blank=True), - 'atnd_hits': lambda x: Item.sanitize_integer_value( - x, 'atnd_hits', blank=True), 'mailing_targeting': lambda x: Item.sanitize_bool_integer_value( x, 'mailing_targeting', blank=True) } @@ -149,8 +143,6 @@ def create(self, **kwargs): description (str) categories (list of int) regions (list of str) - atnd_visits (int) - atnd_hits (int) mailing_targeting (bool) """ @@ -163,14 +155,13 @@ def update(self, _id, **kwargs): Args: _id (int) name (str) + kind (str) language (str) adservice (int) site_url (str) description (str) categories (list of int) regions (list of str) - atnd_visits (int) - atnd_hits (int) mailing_targeting (bool) """ From a6e6a6b2e50472982cc99095bf8ea522884cf9b2 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Tue, 4 Oct 2016 12:45:39 +0300 Subject: [PATCH 26/42] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c66bcdc..57d7108 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ A Python wrapper around the [Admitad API](https://developers.admitad.com/en/) Install ------- - pip install admitad-api + pip install pyadmitad Example ------- From 485d09af3a05bfb28c193cd777682f3d0c644c41 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Fri, 4 Nov 2016 17:36:36 +0300 Subject: [PATCH 27/42] rename lib pyadmitad => admitad --- README.md | 4 ++-- {pyadmitad => admitad}/__init__.py | 0 {pyadmitad => admitad}/api.py | 2 +- {pyadmitad => admitad}/client.py | 2 +- {pyadmitad => admitad}/constants.py | 0 {pyadmitad => admitad}/exceptions.py | 0 admitad/items/__init__.py | 20 ++++++++++++++++++ {pyadmitad => admitad}/items/announcements.py | 2 +- {pyadmitad => admitad}/items/arecords.py | 2 +- {pyadmitad => admitad}/items/auxiliary.py | 2 +- {pyadmitad => admitad}/items/banners.py | 2 +- {pyadmitad => admitad}/items/base.py | 2 +- {pyadmitad => admitad}/items/broken_links.py | 2 +- {pyadmitad => admitad}/items/campaigns.py | 2 +- {pyadmitad => admitad}/items/coupons.py | 2 +- {pyadmitad => admitad}/items/deeplinks.py | 2 +- {pyadmitad => admitad}/items/landings.py | 2 +- {pyadmitad => admitad}/items/links.py | 2 +- {pyadmitad => admitad}/items/lost_orders.py | 2 +- {pyadmitad => admitad}/items/me.py | 2 +- {pyadmitad => admitad}/items/news.py | 2 +- {pyadmitad => admitad}/items/optcodes.py | 2 +- {pyadmitad => admitad}/items/payments.py | 2 +- {pyadmitad => admitad}/items/referrals.py | 2 +- {pyadmitad => admitad}/items/retag.py | 2 +- {pyadmitad => admitad}/items/statistics.py | 4 ++-- {pyadmitad => admitad}/items/tickets.py | 2 +- {pyadmitad => admitad}/items/websites.py | 2 +- {pyadmitad => admitad}/tests/__init__.py | 0 {pyadmitad => admitad}/tests/base.py | 2 +- {pyadmitad => admitad}/tests/data/image.png | Bin .../tests/test_announcements.py | 4 ++-- {pyadmitad => admitad}/tests/test_arecords.py | 4 ++-- .../tests/test_auxiliary.py | 4 ++-- {pyadmitad => admitad}/tests/test_banners.py | 4 ++-- 
{pyadmitad => admitad}/tests/test_base.py | 6 +++--- .../tests/test_broken_links.py | 4 ++-- .../tests/test_campaigns.py | 4 ++-- {pyadmitad => admitad}/tests/test_coupons.py | 4 ++-- .../tests/test_deeplinks.py | 4 ++-- {pyadmitad => admitad}/tests/test_landings.py | 4 ++-- {pyadmitad => admitad}/tests/test_links.py | 4 ++-- .../tests/test_lost_orders.py | 6 +++--- {pyadmitad => admitad}/tests/test_me.py | 4 ++-- {pyadmitad => admitad}/tests/test_news.py | 4 ++-- {pyadmitad => admitad}/tests/test_optcodes.py | 4 ++-- {pyadmitad => admitad}/tests/test_payments.py | 6 +++--- .../tests/test_referrals.py | 6 +++--- {pyadmitad => admitad}/tests/test_retag.py | 6 +++--- .../tests/test_statistics.py | 6 +++--- {pyadmitad => admitad}/tests/test_tickets.py | 6 +++--- .../tests/test_transport.py | 8 +++---- {pyadmitad => admitad}/tests/test_websites.py | 4 ++-- {pyadmitad => admitad}/transport.py | 4 ++-- pyadmitad/items/__init__.py | 20 ------------------ setup.py | 4 ++-- 56 files changed, 104 insertions(+), 104 deletions(-) rename {pyadmitad => admitad}/__init__.py (100%) rename {pyadmitad => admitad}/api.py (94%) rename {pyadmitad => admitad}/client.py (90%) rename {pyadmitad => admitad}/constants.py (100%) rename {pyadmitad => admitad}/exceptions.py (100%) create mode 100644 admitad/items/__init__.py rename {pyadmitad => admitad}/items/announcements.py (97%) rename {pyadmitad => admitad}/items/arecords.py (95%) rename {pyadmitad => admitad}/items/auxiliary.py (99%) rename {pyadmitad => admitad}/items/banners.py (98%) rename {pyadmitad => admitad}/items/base.py (98%) rename {pyadmitad => admitad}/items/broken_links.py (98%) rename {pyadmitad => admitad}/items/campaigns.py (99%) rename {pyadmitad => admitad}/items/coupons.py (99%) rename {pyadmitad => admitad}/items/deeplinks.py (96%) rename {pyadmitad => admitad}/items/landings.py (96%) rename {pyadmitad => admitad}/items/links.py (92%) rename {pyadmitad => admitad}/items/lost_orders.py (98%) rename {pyadmitad => admitad}/items/me.py (97%) rename {pyadmitad => admitad}/items/news.py (97%) rename {pyadmitad => admitad}/items/optcodes.py (99%) rename {pyadmitad => admitad}/items/payments.py (98%) rename {pyadmitad => admitad}/items/referrals.py (96%) rename {pyadmitad => admitad}/items/retag.py (99%) rename {pyadmitad => admitad}/items/statistics.py (99%) rename {pyadmitad => admitad}/items/tickets.py (98%) rename {pyadmitad => admitad}/items/websites.py (99%) rename {pyadmitad => admitad}/tests/__init__.py (100%) rename {pyadmitad => admitad}/tests/base.py (90%) rename {pyadmitad => admitad}/tests/data/image.png (100%) rename {pyadmitad => admitad}/tests/test_announcements.py (96%) rename {pyadmitad => admitad}/tests/test_arecords.py (96%) rename {pyadmitad => admitad}/tests/test_auxiliary.py (98%) rename {pyadmitad => admitad}/tests/test_banners.py (94%) rename {pyadmitad => admitad}/tests/test_base.py (98%) rename {pyadmitad => admitad}/tests/test_broken_links.py (95%) rename {pyadmitad => admitad}/tests/test_campaigns.py (96%) rename {pyadmitad => admitad}/tests/test_coupons.py (97%) rename {pyadmitad => admitad}/tests/test_deeplinks.py (90%) rename {pyadmitad => admitad}/tests/test_landings.py (92%) rename {pyadmitad => admitad}/tests/test_links.py (90%) rename {pyadmitad => admitad}/tests/test_lost_orders.py (93%) rename {pyadmitad => admitad}/tests/test_me.py (97%) rename {pyadmitad => admitad}/tests/test_news.py (97%) rename {pyadmitad => admitad}/tests/test_optcodes.py (96%) rename {pyadmitad => admitad}/tests/test_payments.py (94%) 
rename {pyadmitad => admitad}/tests/test_referrals.py (95%) rename {pyadmitad => admitad}/tests/test_retag.py (95%) rename {pyadmitad => admitad}/tests/test_statistics.py (98%) rename {pyadmitad => admitad}/tests/test_tickets.py (92%) rename {pyadmitad => admitad}/tests/test_transport.py (96%) rename {pyadmitad => admitad}/tests/test_websites.py (99%) rename {pyadmitad => admitad}/transport.py (97%) delete mode 100644 pyadmitad/items/__init__.py diff --git a/README.md b/README.md index 57d7108..1173efe 100644 --- a/README.md +++ b/README.md @@ -6,12 +6,12 @@ A Python wrapper around the [Admitad API](https://developers.admitad.com/en/) Install ------- - pip install pyadmitad + pip install admitad Example ------- ```python -from pyadmitad import api, items +from admitad import api, items client_id = "[client_id]" client_secret = "[client_secret]" diff --git a/pyadmitad/__init__.py b/admitad/__init__.py similarity index 100% rename from pyadmitad/__init__.py rename to admitad/__init__.py diff --git a/pyadmitad/api.py b/admitad/api.py similarity index 94% rename from pyadmitad/api.py rename to admitad/api.py index 58ecf58..fd09bd9 100644 --- a/pyadmitad/api.py +++ b/admitad/api.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad import client, transport +from admitad import client, transport def get_oauth_client_token(access_token, user_agent=None, debug=False): diff --git a/pyadmitad/client.py b/admitad/client.py similarity index 90% rename from pyadmitad/client.py rename to admitad/client.py index f6277d2..8253cda 100644 --- a/pyadmitad/client.py +++ b/admitad/client.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad import items +from admitad import items class Client(object): diff --git a/pyadmitad/constants.py b/admitad/constants.py similarity index 100% rename from pyadmitad/constants.py rename to admitad/constants.py diff --git a/pyadmitad/exceptions.py b/admitad/exceptions.py similarity index 100% rename from pyadmitad/exceptions.py rename to admitad/exceptions.py diff --git a/admitad/items/__init__.py b/admitad/items/__init__.py new file mode 100644 index 0000000..a6a71be --- /dev/null +++ b/admitad/items/__init__.py @@ -0,0 +1,20 @@ +from admitad.items.me import * +from admitad.items.websites import * +from admitad.items.auxiliary import * +from admitad.items.announcements import * +from admitad.items.news import * +from admitad.items.links import * +from admitad.items.landings import * +from admitad.items.deeplinks import * +from admitad.items.referrals import * +from admitad.items.payments import * +from admitad.items.coupons import * +from admitad.items.statistics import * +from admitad.items.banners import * +from admitad.items.campaigns import * +from admitad.items.optcodes import * +from admitad.items.lost_orders import * +from admitad.items.arecords import * +from admitad.items.retag import * +from admitad.items.broken_links import * +from admitad.items.tickets import * diff --git a/pyadmitad/items/announcements.py b/admitad/items/announcements.py similarity index 97% rename from pyadmitad/items/announcements.py rename to admitad/items/announcements.py index ae80c89..356716c 100644 --- a/pyadmitad/items/announcements.py +++ b/admitad/items/announcements.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/arecords.py b/admitad/items/arecords.py similarity index 
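For downstream code the rename is purely mechanical: only the import root changes. A short migration sketch, reusing the placeholder credentials from the README example above:

```python
# Before this patch:  from pyadmitad import api, items
from admitad import api, items

scope = ' '.join(set([items.Me.SCOPE]))
client = api.get_oauth_client_client('[client_id]', '[client_secret]', scope)
print(client.Me.get())
```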
95% rename from pyadmitad/items/arecords.py rename to admitad/items/arecords.py index e54280c..e245de7 100644 --- a/pyadmitad/items/arecords.py +++ b/admitad/items/arecords.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/auxiliary.py b/admitad/items/auxiliary.py similarity index 99% rename from pyadmitad/items/auxiliary.py rename to admitad/items/auxiliary.py index 253cbf4..08fc9a5 100644 --- a/pyadmitad/items/auxiliary.py +++ b/admitad/items/auxiliary.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = ( diff --git a/pyadmitad/items/banners.py b/admitad/items/banners.py similarity index 98% rename from pyadmitad/items/banners.py rename to admitad/items/banners.py index 82b55ce..dc853fe 100644 --- a/pyadmitad/items/banners.py +++ b/admitad/items/banners.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/base.py b/admitad/items/base.py similarity index 98% rename from pyadmitad/items/base.py rename to admitad/items/base.py index f697f9f..6d8f2ec 100644 --- a/pyadmitad/items/base.py +++ b/admitad/items/base.py @@ -8,7 +8,7 @@ from datetime import datetime, date from urllib.parse import urljoin -from pyadmitad.constants import BASE_URL, DATE_FORMAT, LONG_DATE_FORMAT +from admitad.constants import BASE_URL, DATE_FORMAT, LONG_DATE_FORMAT class Item(object): diff --git a/pyadmitad/items/broken_links.py b/admitad/items/broken_links.py similarity index 98% rename from pyadmitad/items/broken_links.py rename to admitad/items/broken_links.py index 7a578e4..750aaba 100644 --- a/pyadmitad/items/broken_links.py +++ b/admitad/items/broken_links.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/campaigns.py b/admitad/items/campaigns.py similarity index 99% rename from pyadmitad/items/campaigns.py rename to admitad/items/campaigns.py index 7efd859..9633c6c 100644 --- a/pyadmitad/items/campaigns.py +++ b/admitad/items/campaigns.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/coupons.py b/admitad/items/coupons.py similarity index 99% rename from pyadmitad/items/coupons.py rename to admitad/items/coupons.py index cfba38b..7f42f23 100644 --- a/pyadmitad/items/coupons.py +++ b/admitad/items/coupons.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/deeplinks.py b/admitad/items/deeplinks.py similarity index 96% rename from pyadmitad/items/deeplinks.py rename to admitad/items/deeplinks.py index 3ee9d87..ecaa49c 100644 --- a/pyadmitad/items/deeplinks.py +++ b/admitad/items/deeplinks.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/landings.py b/admitad/items/landings.py similarity index 96% rename from pyadmitad/items/landings.py rename to admitad/items/landings.py 
index 5e91f58..27c699f 100644 --- a/pyadmitad/items/landings.py +++ b/admitad/items/landings.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/links.py b/admitad/items/links.py similarity index 92% rename from pyadmitad/items/links.py rename to admitad/items/links.py index 03ad3b4..de58f96 100644 --- a/pyadmitad/items/links.py +++ b/admitad/items/links.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item class LinksValidator(Item): diff --git a/pyadmitad/items/lost_orders.py b/admitad/items/lost_orders.py similarity index 98% rename from pyadmitad/items/lost_orders.py rename to admitad/items/lost_orders.py index 2618ef6..f113c68 100644 --- a/pyadmitad/items/lost_orders.py +++ b/admitad/items/lost_orders.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/me.py b/admitad/items/me.py similarity index 97% rename from pyadmitad/items/me.py rename to admitad/items/me.py index 5ab521e..1274067 100644 --- a/pyadmitad/items/me.py +++ b/admitad/items/me.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = ( diff --git a/pyadmitad/items/news.py b/admitad/items/news.py similarity index 97% rename from pyadmitad/items/news.py rename to admitad/items/news.py index 57e68df..8f1dcbd 100644 --- a/pyadmitad/items/news.py +++ b/admitad/items/news.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/optcodes.py b/admitad/items/optcodes.py similarity index 99% rename from pyadmitad/items/optcodes.py rename to admitad/items/optcodes.py index 860f058..865aae4 100644 --- a/pyadmitad/items/optcodes.py +++ b/admitad/items/optcodes.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/payments.py b/admitad/items/payments.py similarity index 98% rename from pyadmitad/items/payments.py rename to admitad/items/payments.py index 5559ea0..196f9f5 100644 --- a/pyadmitad/items/payments.py +++ b/admitad/items/payments.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/referrals.py b/admitad/items/referrals.py similarity index 96% rename from pyadmitad/items/referrals.py rename to admitad/items/referrals.py index 71d9b2a..123814c 100644 --- a/pyadmitad/items/referrals.py +++ b/admitad/items/referrals.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/retag.py b/admitad/items/retag.py similarity index 99% rename from pyadmitad/items/retag.py rename to admitad/items/retag.py index 1ff5b19..4096833 100644 --- a/pyadmitad/items/retag.py +++ b/admitad/items/retag.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base 
import Item __all__ = [ diff --git a/pyadmitad/items/statistics.py b/admitad/items/statistics.py similarity index 99% rename from pyadmitad/items/statistics.py rename to admitad/items/statistics.py index 9e47053..3fecdc4 100644 --- a/pyadmitad/items/statistics.py +++ b/admitad/items/statistics.py @@ -3,8 +3,8 @@ from copy import copy -from pyadmitad.constants import MAX_SUB_ID_LENGTH -from pyadmitad.items.base import Item +from admitad.constants import MAX_SUB_ID_LENGTH +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/tickets.py b/admitad/items/tickets.py similarity index 98% rename from pyadmitad/items/tickets.py rename to admitad/items/tickets.py index 7738418..a34dedb 100644 --- a/pyadmitad/items/tickets.py +++ b/admitad/items/tickets.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = [ diff --git a/pyadmitad/items/websites.py b/admitad/items/websites.py similarity index 99% rename from pyadmitad/items/websites.py rename to admitad/items/websites.py index 60fc2e5..0e53e81 100644 --- a/pyadmitad/items/websites.py +++ b/admitad/items/websites.py @@ -1,7 +1,7 @@ # coding: utf-8 from __future__ import unicode_literals -from pyadmitad.items.base import Item +from admitad.items.base import Item __all__ = ( diff --git a/pyadmitad/tests/__init__.py b/admitad/tests/__init__.py similarity index 100% rename from pyadmitad/tests/__init__.py rename to admitad/tests/__init__.py diff --git a/pyadmitad/tests/base.py b/admitad/tests/base.py similarity index 90% rename from pyadmitad/tests/base.py rename to admitad/tests/base.py index 99bd1d3..7863544 100644 --- a/pyadmitad/tests/base.py +++ b/admitad/tests/base.py @@ -7,7 +7,7 @@ from unittest import TestCase from urllib.parse import urlencode -from pyadmitad.api import get_oauth_client_token +from admitad.api import get_oauth_client_token class BaseTestCase(TestCase): diff --git a/pyadmitad/tests/data/image.png b/admitad/tests/data/image.png similarity index 100% rename from pyadmitad/tests/data/image.png rename to admitad/tests/data/image.png diff --git a/pyadmitad/tests/test_announcements.py b/admitad/tests/test_announcements.py similarity index 96% rename from pyadmitad/tests/test_announcements.py rename to admitad/tests/test_announcements.py index 468825a..991cbaf 100644 --- a/pyadmitad/tests/test_announcements.py +++ b/admitad/tests/test_announcements.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Announcements -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Announcements +from admitad.tests.base import BaseTestCase class AnnouncementsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_arecords.py b/admitad/tests/test_arecords.py similarity index 96% rename from pyadmitad/tests/test_arecords.py rename to admitad/tests/test_arecords.py index 7b36ee5..bde6f7f 100644 --- a/pyadmitad/tests/test_arecords.py +++ b/admitad/tests/test_arecords.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Arecords -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Arecords +from admitad.tests.base import BaseTestCase class ArecordsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_auxiliary.py b/admitad/tests/test_auxiliary.py similarity index 98% rename from pyadmitad/tests/test_auxiliary.py rename to admitad/tests/test_auxiliary.py index 60bbce6..d0bb1c8 100644 --- a/pyadmitad/tests/test_auxiliary.py +++ 
b/admitad/tests/test_auxiliary.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.tests.base import BaseTestCase -from pyadmitad.items.auxiliary import WebsiteTypes, WebsiteRegions, \ +from admitad.tests.base import BaseTestCase +from admitad.items.auxiliary import WebsiteTypes, WebsiteRegions, \ SystemLanguages, SystemCurrencies, AdvertiserServices, CampaignCategories diff --git a/pyadmitad/tests/test_banners.py b/admitad/tests/test_banners.py similarity index 94% rename from pyadmitad/tests/test_banners.py rename to admitad/tests/test_banners.py index b1e77d3..5ca1dcd 100644 --- a/pyadmitad/tests/test_banners.py +++ b/admitad/tests/test_banners.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Banners, BannersForWebsite -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Banners, BannersForWebsite +from admitad.tests.base import BaseTestCase class BannersTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_base.py b/admitad/tests/test_base.py similarity index 98% rename from pyadmitad/tests/test_base.py rename to admitad/tests/test_base.py index 55eaa3b..d914472 100644 --- a/pyadmitad/tests/test_base.py +++ b/admitad/tests/test_base.py @@ -4,9 +4,9 @@ import unittest from datetime import datetime, date -from pyadmitad.items.base import Item -from pyadmitad.tests.base import BaseTestCase -from pyadmitad.constants import BASE_URL +from admitad.items.base import Item +from admitad.tests.base import BaseTestCase +from admitad.constants import BASE_URL class ItemTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_broken_links.py b/admitad/tests/test_broken_links.py similarity index 95% rename from pyadmitad/tests/test_broken_links.py rename to admitad/tests/test_broken_links.py index 8ba03ec..07b1dc6 100644 --- a/pyadmitad/tests/test_broken_links.py +++ b/admitad/tests/test_broken_links.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import BrokenLinks, ManageBrokenLinks -from pyadmitad.tests.base import BaseTestCase +from admitad.items import BrokenLinks, ManageBrokenLinks +from admitad.tests.base import BaseTestCase class BrokenLinksTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_campaigns.py b/admitad/tests/test_campaigns.py similarity index 96% rename from pyadmitad/tests/test_campaigns.py rename to admitad/tests/test_campaigns.py index e3f857a..8f8e3d8 100644 --- a/pyadmitad/tests/test_campaigns.py +++ b/admitad/tests/test_campaigns.py @@ -4,9 +4,9 @@ import unittest import responses -from pyadmitad.items import Campaigns, CampaignsForWebsite, \ +from admitad.items import Campaigns, CampaignsForWebsite, \ CampaignsManage -from pyadmitad.tests.base import BaseTestCase +from admitad.tests.base import BaseTestCase class CampaignsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_coupons.py b/admitad/tests/test_coupons.py similarity index 97% rename from pyadmitad/tests/test_coupons.py rename to admitad/tests/test_coupons.py index 992fd24..9e13e88 100644 --- a/pyadmitad/tests/test_coupons.py +++ b/admitad/tests/test_coupons.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Coupons, CouponsForWebsite, CouponsCategories -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Coupons, CouponsForWebsite, CouponsCategories +from admitad.tests.base import BaseTestCase class CouponsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_deeplinks.py b/admitad/tests/test_deeplinks.py similarity index 90% rename from 
pyadmitad/tests/test_deeplinks.py rename to admitad/tests/test_deeplinks.py index 7043d20..b086c8e 100644 --- a/pyadmitad/tests/test_deeplinks.py +++ b/admitad/tests/test_deeplinks.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.tests.base import BaseTestCase -from pyadmitad.items import DeeplinksManage +from admitad.tests.base import BaseTestCase +from admitad.items import DeeplinksManage class DeeplinksManageTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_landings.py b/admitad/tests/test_landings.py similarity index 92% rename from pyadmitad/tests/test_landings.py rename to admitad/tests/test_landings.py index 507b62e..a79b14f 100644 --- a/pyadmitad/tests/test_landings.py +++ b/admitad/tests/test_landings.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Landings, LandingsForWebsite -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Landings, LandingsForWebsite +from admitad.tests.base import BaseTestCase class LandingsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_links.py b/admitad/tests/test_links.py similarity index 90% rename from pyadmitad/tests/test_links.py rename to admitad/tests/test_links.py index 59fbf5f..a592a91 100644 --- a/pyadmitad/tests/test_links.py +++ b/admitad/tests/test_links.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import LinksValidator -from pyadmitad.tests.base import BaseTestCase +from admitad.items import LinksValidator +from admitad.tests.base import BaseTestCase class LinksValidationTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_lost_orders.py b/admitad/tests/test_lost_orders.py similarity index 93% rename from pyadmitad/tests/test_lost_orders.py rename to admitad/tests/test_lost_orders.py index 2714f32..283d799 100644 --- a/pyadmitad/tests/test_lost_orders.py +++ b/admitad/tests/test_lost_orders.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import LostOrders, LostOrdersManager -from pyadmitad.tests.base import BaseTestCase +from admitad.items import LostOrders, LostOrdersManager +from admitad.tests.base import BaseTestCase class LostOrdersTestCase(BaseTestCase): @@ -55,7 +55,7 @@ def test_create_lost_order(self): status=200, ) result = self.client.LostOrdersManager.create( - attachments=['./pyadmitad/tests/data/image.png'], + attachments=['./admitad/tests/data/image.png'], website=10, campaign=20, order_id='asd3f3', diff --git a/pyadmitad/tests/test_me.py b/admitad/tests/test_me.py similarity index 97% rename from pyadmitad/tests/test_me.py rename to admitad/tests/test_me.py index 70efa32..a263ae1 100644 --- a/pyadmitad/tests/test_me.py +++ b/admitad/tests/test_me.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Me, Balance, PaymentsSettings -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Me, Balance, PaymentsSettings +from admitad.tests.base import BaseTestCase class MeTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_news.py b/admitad/tests/test_news.py similarity index 97% rename from pyadmitad/tests/test_news.py rename to admitad/tests/test_news.py index e535d2d..d2a537c 100644 --- a/pyadmitad/tests/test_news.py +++ b/admitad/tests/test_news.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import News -from pyadmitad.tests.base import BaseTestCase +from admitad.items import News +from admitad.tests.base import BaseTestCase class AnnouncementsTestCase(BaseTestCase): diff --git 
a/pyadmitad/tests/test_optcodes.py b/admitad/tests/test_optcodes.py similarity index 96% rename from pyadmitad/tests/test_optcodes.py rename to admitad/tests/test_optcodes.py index bb61e28..77e17aa 100644 --- a/pyadmitad/tests/test_optcodes.py +++ b/admitad/tests/test_optcodes.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import OptCodes, OfferStatusOptCodesManager, ActionOptCodesManager -from pyadmitad.tests.base import BaseTestCase +from admitad.items import OptCodes, OfferStatusOptCodesManager, ActionOptCodesManager +from admitad.tests.base import BaseTestCase class OptCodeTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_payments.py b/admitad/tests/test_payments.py similarity index 94% rename from pyadmitad/tests/test_payments.py rename to admitad/tests/test_payments.py index 14c4c00..9dce90c 100644 --- a/pyadmitad/tests/test_payments.py +++ b/admitad/tests/test_payments.py @@ -4,9 +4,9 @@ import unittest import responses -from pyadmitad.items import Payments, PaymentsStatement, PaymentsManage -from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Payments, PaymentsStatement, PaymentsManage +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase class PaymentsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_referrals.py b/admitad/tests/test_referrals.py similarity index 95% rename from pyadmitad/tests/test_referrals.py rename to admitad/tests/test_referrals.py index b75b26c..d61425b 100644 --- a/pyadmitad/tests/test_referrals.py +++ b/admitad/tests/test_referrals.py @@ -6,9 +6,9 @@ import responses -from pyadmitad.items import Referrals -from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Referrals +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase class ReferralsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_retag.py b/admitad/tests/test_retag.py similarity index 95% rename from pyadmitad/tests/test_retag.py rename to admitad/tests/test_retag.py index 7960a7e..29027a0 100644 --- a/pyadmitad/tests/test_retag.py +++ b/admitad/tests/test_retag.py @@ -4,9 +4,9 @@ import unittest import responses -from pyadmitad.items import Retag, RetagManager -from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Retag, RetagManager +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase class RetagTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_statistics.py b/admitad/tests/test_statistics.py similarity index 98% rename from pyadmitad/tests/test_statistics.py rename to admitad/tests/test_statistics.py index ec84353..5cd8726 100644 --- a/pyadmitad/tests/test_statistics.py +++ b/admitad/tests/test_statistics.py @@ -4,11 +4,11 @@ import unittest import responses -from pyadmitad.items import StatisticWebsites, StatisticCampaigns,\ +from admitad.items import StatisticWebsites, StatisticCampaigns,\ StatisticDays, StatisticMonths, StatisticActions, StatisticSubIds,\ StatisticSources, StatisticKeywords -from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET -from pyadmitad.tests.base import 
BaseTestCase +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase class StatisticWebsitesTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_tickets.py b/admitad/tests/test_tickets.py similarity index 92% rename from pyadmitad/tests/test_tickets.py rename to admitad/tests/test_tickets.py index 333c188..e4d3892 100644 --- a/pyadmitad/tests/test_tickets.py +++ b/admitad/tests/test_tickets.py @@ -4,9 +4,9 @@ import unittest import responses -from pyadmitad.items import Tickets, TicketsManager -from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Tickets, TicketsManager +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase class TicketsTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_transport.py b/admitad/tests/test_transport.py similarity index 96% rename from pyadmitad/tests/test_transport.py rename to admitad/tests/test_transport.py index a1cae3e..4c83a31 100644 --- a/pyadmitad/tests/test_transport.py +++ b/admitad/tests/test_transport.py @@ -6,12 +6,12 @@ import responses -from pyadmitad.transport import oauth_client_authorization, get_credentials, build_headers, \ +from admitad.transport import oauth_client_authorization, get_credentials, build_headers, \ prepare_request_data, api_request, oauth_refresh_access_token, HttpTransport -from pyadmitad.constants import DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ +from admitad.constants import DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ BASE_URL, TOKEN_URL -from pyadmitad.exceptions import HttpException -from pyadmitad.tests.base import BaseTestCase +from admitad.exceptions import HttpException +from admitad.tests.base import BaseTestCase class BaseTransportTestCase(BaseTestCase): diff --git a/pyadmitad/tests/test_websites.py b/admitad/tests/test_websites.py similarity index 99% rename from pyadmitad/tests/test_websites.py rename to admitad/tests/test_websites.py index 2a2a54d..c95a3d9 100644 --- a/pyadmitad/tests/test_websites.py +++ b/admitad/tests/test_websites.py @@ -4,8 +4,8 @@ import unittest import responses -from pyadmitad.items import Websites, WebsitesManage -from pyadmitad.tests.base import BaseTestCase +from admitad.items import Websites, WebsitesManage +from admitad.tests.base import BaseTestCase class WebsitesTestCase(BaseTestCase): diff --git a/pyadmitad/transport.py b/admitad/transport.py similarity index 97% rename from pyadmitad/transport.py rename to admitad/transport.py index 2b23ec9..849ea09 100644 --- a/pyadmitad/transport.py +++ b/admitad/transport.py @@ -7,9 +7,9 @@ import requests -from pyadmitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ DEFAULT_REQUEST_TIMEOUT, MAX_PAGINATION_LIMIT, TOKEN_URL -from pyadmitad.exceptions import HttpException, ConnectionException, JsonException +from admitad.exceptions import HttpException, ConnectionException, JsonException LOG = logging.getLogger(__file__) LOG.addHandler(logging.StreamHandler()) diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py deleted file mode 100644 index 97b31ff..0000000 --- a/pyadmitad/items/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -from pyadmitad.items.me import * -from 
pyadmitad.items.websites import * -from pyadmitad.items.auxiliary import * -from pyadmitad.items.announcements import * -from pyadmitad.items.news import * -from pyadmitad.items.links import * -from pyadmitad.items.landings import * -from pyadmitad.items.deeplinks import * -from pyadmitad.items.referrals import * -from pyadmitad.items.payments import * -from pyadmitad.items.coupons import * -from pyadmitad.items.statistics import * -from pyadmitad.items.banners import * -from pyadmitad.items.campaigns import * -from pyadmitad.items.optcodes import * -from pyadmitad.items.lost_orders import * -from pyadmitad.items.arecords import * -from pyadmitad.items.retag import * -from pyadmitad.items.broken_links import * -from pyadmitad.items.tickets import * diff --git a/setup.py b/setup.py index 37a0235..e2837a9 100644 --- a/setup.py +++ b/setup.py @@ -1,8 +1,8 @@ from setuptools import setup setup( - name="pyadmitad", - packages=['pyadmitad'], + name="admitad", + packages=['admitad'], version='1.0.0', author='Admitad Dev Bot', author_email='dev@admitad.com', From 26f4d57fb584e385556a1c2bc1107053cd48b2b5 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Tue, 8 Nov 2016 12:20:17 +0300 Subject: [PATCH 28/42] update to v1.1 --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index e2837a9..f6a1bdf 100644 --- a/setup.py +++ b/setup.py @@ -3,13 +3,13 @@ setup( name="admitad", packages=['admitad'], - version='1.0.0', + version='1.1.0', author='Admitad Dev Bot', author_email='dev@admitad.com', description='A Python wrapper around the Admitad API', license='MIT', url='https://github.com/admitad/admitad-python-api', - download_url='https://github.com/admitad/admitad-python-api/tarball/1.0', + download_url='https://github.com/admitad/admitad-python-api/tarball/1.1', keywords=['admitad'], install_requires=['requests>=2.0', 'future'], tests_require=['nose2', 'responses'], From ffccce8d7921b00528ddacb9053da78fbd78455a Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Tue, 8 Nov 2016 13:05:53 +0300 Subject: [PATCH 29/42] fix setup.py folder error --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f6a1bdf..f82f3fc 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ setup( name="admitad", - packages=['admitad'], + packages=['admitad', 'admitad.items'], version='1.1.0', author='Admitad Dev Bot', author_email='dev@admitad.com', From a3a321c42c1d3a3afcb0cd4434e784e47c9928c7 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Tue, 8 Nov 2016 13:06:16 +0300 Subject: [PATCH 30/42] v1.1.1 --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f82f3fc..7b1ede8 100644 --- a/setup.py +++ b/setup.py @@ -3,13 +3,13 @@ setup( name="admitad", packages=['admitad', 'admitad.items'], - version='1.1.0', + version='1.1.1', author='Admitad Dev Bot', author_email='dev@admitad.com', description='A Python wrapper around the Admitad API', license='MIT', url='https://github.com/admitad/admitad-python-api', - download_url='https://github.com/admitad/admitad-python-api/tarball/1.1', + download_url='https://github.com/admitad/admitad-python-api/tarball/1.1.1', keywords=['admitad'], install_requires=['requests>=2.0', 'future'], tests_require=['nose2', 'responses'], From 1037556152eb1e810ca110cc40afc73929f67ddf Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 5 Jun 2017 20:36:04 +0300 Subject: [PATCH 31/42] fix request data bug with PUT and DELETE --- admitad/transport.py | 4 ++-- 1 
file changed, 2 insertions(+), 2 deletions(-) diff --git a/admitad/transport.py b/admitad/transport.py index 849ea09..bb3d1de 100644 --- a/admitad/transport.py +++ b/admitad/transport.py @@ -70,9 +70,9 @@ def prepare_request_data(data=None, headers=None, method='GET', prepared_data = prepare_data(data) - if method == 'POST': + if method in ['POST', 'PUT']: kwargs['data'] = prepared_data - if method == 'GET': + if method in ['GET', 'DELETE']: kwargs['params'] = prepared_data return kwargs From f84ba6664e985c2b7cf545ffc46a0fa6bc7b03aa Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 5 Jun 2017 20:36:36 +0300 Subject: [PATCH 32/42] add LostOrders appeal_id and appeal_status --- admitad/items/lost_orders.py | 46 +++++++++++++++++++++++++++++++++++- 1 file changed, 45 insertions(+), 1 deletion(-) diff --git a/admitad/items/lost_orders.py b/admitad/items/lost_orders.py index f113c68..384d2b4 100644 --- a/admitad/items/lost_orders.py +++ b/admitad/items/lost_orders.py @@ -20,11 +20,34 @@ class LostOrders(Item): def get(self, **kwargs): """ Args: + campaign (id) + website (id) + status (string) + start_date (date) + end_date (date) + appeal_id (string) + appeal_status (string) limit (int) offset (int) """ - return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + filtering = { + 'filter_by': kwargs, + 'available': { + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'status': lambda x: Item.sanitize_string_value(x, 'status', blank=True), + 'start_date': lambda x: Item.sanitize_string_value(x, 'start_date', blank=True), + 'end_date': lambda x: Item.sanitize_string_value(x, 'end_date', blank=True), + 'appeal_id': lambda x: Item.sanitize_string_value(x, 'appeal_id', blank=True), + 'appeal_status': lambda x: Item.sanitize_string_value(x, 'appeal_status', blank=True), + } + } + + return self.transport.get() \ + .set_filtering(filtering) \ + .set_pagination(**kwargs) \ + .request(url=self.URL) def getOne(self, lost_order_id): """ @@ -46,6 +69,7 @@ class LostOrdersManager(Item): DELETE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s/decline') CREATE_URL = Item.prepare_url('lost_orders/create') + UPDATE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s/update') CREATE_FIELDS = { 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign'), @@ -54,6 +78,7 @@ class LostOrdersManager(Item): 'order_date': lambda x: Item.sanitize_date(x, 'order_date'), 'order_price': lambda x: Item.sanitize_float_value(x, 'order_price'), 'comment': lambda x: Item.sanitize_string_value(x, 'comment'), + 'appeal_id': lambda x: Item.sanitize_string_value(x, 'appeal_id'), } def delete(self, lost_order_id): @@ -78,6 +103,7 @@ def create(self, attachments, **kwargs): order_id (str) order_date (date) order_price (float) + appeal_id (str) comment (str) """ @@ -85,3 +111,21 @@ def create(self, attachments, **kwargs): files = [('attachment', open(item, 'rb')) for item in Item.sanitize_string_array(attachments, 'attachments')] return self.transport.post().set_data(data).set_files(files).request(url=self.CREATE_URL) + + def update(self, lost_order_id, appeal_status): + """ + Args: + lost_order_id (int) + appeal_status (str) + + """ + request_data = { + 'url': self.UPDATE_URL, + 'lost_order_id': Item.sanitize_id(lost_order_id), + } + + data = { + 'appeal_status': self.sanitize_string_value(appeal_status, 'appeal_status'), + } + + return self.transport.put().set_data(data).request(**request_data) 
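The two patches above change how request data is attached for PUT/DELETE and extend the lost-order API with appeal handling. A minimal usage sketch of the new appeal workflow — not part of the patches themselves — assuming an already-authorized `client` (construction omitted) and purely illustrative IDs, paths and statuses:

```python
# Sketch only: `client` is assumed to be an authorized admitad API client,
# and the website/campaign/order values below are placeholders.

# File a lost order; appeal_id is the new optional field added above.
client.LostOrdersManager.create(
    attachments=['./admitad/tests/data/image.png'],
    website=10, campaign=20,
    order_id='039NRUHFJEW', order_date='12.08.2016', order_price=345.77,
    comment='order missing from statistics',
    appeal_id='A-42',
)

# List only appeals that are still being processed (new filtering options).
pending = client.LostOrders.get(appeal_status='processing', limit=20, offset=0)

# Move a specific lost order to a final appeal status via the new update().
client.LostOrdersManager.update(76, appeal_status='resolved')
```

The `update()` call issues a PUT with its payload in the request body, which relies on the `prepare_request_data` fix in the preceding patch.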
From ec7ba74f4fc89637439a37d87bc10d0650bbdf3b Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 5 Jun 2017 20:36:50 +0300 Subject: [PATCH 33/42] add tests and examples --- README.md | 3 ++- admitad/tests/test_lost_orders.py | 25 ++++++++++++++++++++++--- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 1173efe..aa14f44 100644 --- a/README.md +++ b/README.md @@ -470,7 +470,7 @@ res = client.ActionOptCodesManager.update( ```python res = client.LostOrders.get() -res = client.LostOrders.get(limit=20, offset=0) +res = client.LostOrders.get(limit=20, offset=0, appeal_status='processing') res = client.LostOrders.getOne(76) ``` @@ -483,6 +483,7 @@ res = client.LostOrdersManager.create( order_id='039NRUHFJEW', order_date='12.08.2016', order_price=345.77, comment='some comment' ) +res = client.LostOrdersManager.update(77, appeal_status='resolved') res = client.LostOrdersManager.delete(77) ``` diff --git a/admitad/tests/test_lost_orders.py b/admitad/tests/test_lost_orders.py index 283d799..d7e3170 100644 --- a/admitad/tests/test_lost_orders.py +++ b/admitad/tests/test_lost_orders.py @@ -16,7 +16,8 @@ def test_get_lost_orders_request(self): resp.GET, self.prepare_url(LostOrders.URL, params={ 'limit': 20, - 'offset': 1 + 'offset': 1, + 'appeal_status': 'resolved', }), match_querystring=True, json={'status': 'ok'}, @@ -24,7 +25,8 @@ def test_get_lost_orders_request(self): ) result = self.client.LostOrders.get( limit=20, - offset=1 + offset=1, + appeal_status='resolved' ) self.assertIn('status', result) @@ -61,7 +63,24 @@ def test_create_lost_order(self): order_id='asd3f3', order_date='01.01.2010', order_price=1200, - comment='foo bar baz' + comment='foo bar baz', + appeal_id='foo' + ) + + self.assertIn('status', result) + + def test_update_lost_order(self): + with responses.RequestsMock() as resp: + resp.add( + resp.PUT, + self.prepare_url(LostOrdersManager.UPDATE_URL, lost_order_id=10), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrdersManager.update( + lost_order_id=10, + appeal_status='resolved' ) self.assertIn('status', result) From f0daf3d1740871156e4ba2e5c7d8bac2d7030f46 Mon Sep 17 00:00:00 2001 From: Ilya Tumash Date: Mon, 5 Jun 2017 20:37:04 +0300 Subject: [PATCH 34/42] v1.2.0 --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 7b1ede8..5c1e859 100644 --- a/setup.py +++ b/setup.py @@ -3,13 +3,13 @@ setup( name="admitad", packages=['admitad', 'admitad.items'], - version='1.1.1', + version='1.2.0', author='Admitad Dev Bot', author_email='dev@admitad.com', description='A Python wrapper around the Admitad API', license='MIT', url='https://github.com/admitad/admitad-python-api', - download_url='https://github.com/admitad/admitad-python-api/tarball/1.1.1', + download_url='https://github.com/admitad/admitad-python-api/tarball/1.2.0', keywords=['admitad'], install_requires=['requests>=2.0', 'future'], tests_require=['nose2', 'responses'], From 7cd1d2a18efa70141fdd28968d71f3bb3579f03a Mon Sep 17 00:00:00 2001 From: Barkholenka Raman Date: Tue, 15 Aug 2017 17:13:46 +0300 Subject: [PATCH 35/42] change link to travis label --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index aa14f44..3998a53 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -admitad-python-api [![Build Status](https://travis-ci.org/janitor/admitad-python-api.svg?branch=master)](https://travis-ci.org/janitor/admitad-python-api) 
+admitad-python-api [![Build Status](https://travis-ci.org/admitad/admitad-python-api.svg?branch=master)](https://travis-ci.org/admitad/admitad-python-api) ================== A Python wrapper around the [Admitad API](https://developers.admitad.com/en/) From 998131274c78c7e024d27c18ff21c63a9fa746eb Mon Sep 17 00:00:00 2001 From: Mihail Landyuk Date: Thu, 26 Oct 2017 17:36:31 +0300 Subject: [PATCH 36/42] deleted arecord functionlality --- admitad/items/__init__.py | 1 - admitad/items/arecords.py | 41 ------------------- admitad/tests/test_arecords.py | 72 ---------------------------------- 3 files changed, 114 deletions(-) delete mode 100644 admitad/items/arecords.py delete mode 100644 admitad/tests/test_arecords.py diff --git a/admitad/items/__init__.py b/admitad/items/__init__.py index a6a71be..2fd1467 100644 --- a/admitad/items/__init__.py +++ b/admitad/items/__init__.py @@ -14,7 +14,6 @@ from admitad.items.campaigns import * from admitad.items.optcodes import * from admitad.items.lost_orders import * -from admitad.items.arecords import * from admitad.items.retag import * from admitad.items.broken_links import * from admitad.items.tickets import * diff --git a/admitad/items/arecords.py b/admitad/items/arecords.py deleted file mode 100644 index e245de7..0000000 --- a/admitad/items/arecords.py +++ /dev/null @@ -1,41 +0,0 @@ -# coding: utf-8 -from __future__ import unicode_literals - -from admitad.items.base import Item - - -__all__ = [ - 'Arecords' -] - - -class Arecords(Item): - - SCOPE = 'arecords' - - URL = Item.prepare_url('arecords') - FOR_WEBSITE_URL = Item.prepare_url('arecords/%(website_id)s') - - def get(self, **kwargs): - """ - Args: - limit (int) - offset (int) - - """ - - return self.transport.get().set_pagination(**kwargs).request(url=self.URL) - - def getForWebsite(self, website_id, **kwargs): - """ - Args: - website_id (int) - - """ - - request_data = { - 'url': self.FOR_WEBSITE_URL, - 'website_id': Item.sanitize_id(website_id) - } - - return self.transport.get().request(**request_data) diff --git a/admitad/tests/test_arecords.py b/admitad/tests/test_arecords.py deleted file mode 100644 index bde6f7f..0000000 --- a/admitad/tests/test_arecords.py +++ /dev/null @@ -1,72 +0,0 @@ -# coding: utf-8 -from __future__ import unicode_literals - -import unittest -import responses - -from admitad.items import Arecords -from admitad.tests.base import BaseTestCase - - -class ArecordsTestCase(BaseTestCase): - - def test_get_arecords_request(self): - with responses.RequestsMock() as resp: - resp.add( - resp.GET, - self.prepare_url(Arecords.URL, params={ - 'limit': 20, - 'offset': 0 - }), - match_querystring=True, - json={ - 'results': [{ - 'domain': 'somewebsite.ru', - 'website_id': 12, - 'name': 'Some website' - }, { - 'domain': 'mywebsite.kz', - 'website_id': 10, - 'name': 'My website' - }], - '_meta': { - 'limit': 20, - 'offset': 0, - 'count': 2, - } - }, - status=200 - ) - - result = self.client.Arecords.get() - - self.assertEqual(len(result['results']), 2) - - def test_get_single_arecords_request(self): - with responses.RequestsMock() as resp: - resp.add( - resp.GET, - self.prepare_url(Arecords.FOR_WEBSITE_URL, website_id=10), - match_querystring=True, - json={ - 'results': [{ - 'domain': 'mywebsite.kz', - 'website_id': 10, - 'name': 'My website' - }], - '_meta': { - 'limit': 20, - 'offset': 0, - 'count': 1, - } - }, - status=200 - ) - - result = self.client.Arecords.getForWebsite(10) - - self.assertEqual(len(result['results']), 1) - - -if __name__ == '__main__': - unittest.main() From 
b0336b2c18b4fff042211ab35d7ac42a384e5b0b Mon Sep 17 00:00:00 2001 From: Mihail Landyuk Date: Fri, 27 Oct 2017 10:03:02 +0300 Subject: [PATCH 37/42] deleted arecord from readme file --- README.md | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/README.md b/README.md index 3998a53..2c83f00 100644 --- a/README.md +++ b/README.md @@ -487,16 +487,6 @@ res = client.LostOrdersManager.update(77, appeal_status='resolved') res = client.LostOrdersManager.delete(77) ``` -### Arecords ### - -###### List of arecords ###### - -```python -res = client.Arecords.get() -res = client.Arecords.get(limit=50) -res = client.Arecords.getForWebsite(10) -``` - ### Retag ### ###### List of retag ###### From 260501cdfb60573fd963147cc762d1b6ab1733f4 Mon Sep 17 00:00:00 2001 From: Nikita Zyl Date: Thu, 10 Feb 2022 10:26:54 +0300 Subject: [PATCH 38/42] Add short links --- admitad/items/__init__.py | 1 + admitad/items/short_links.py | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 admitad/items/short_links.py diff --git a/admitad/items/__init__.py b/admitad/items/__init__.py index 2fd1467..ce089d6 100644 --- a/admitad/items/__init__.py +++ b/admitad/items/__init__.py @@ -17,3 +17,4 @@ from admitad.items.retag import * from admitad.items.broken_links import * from admitad.items.tickets import * +from admitad.items.short_links import * diff --git a/admitad/items/short_links.py b/admitad/items/short_links.py new file mode 100644 index 0000000..e797a89 --- /dev/null +++ b/admitad/items/short_links.py @@ -0,0 +1,35 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'ShortLinks' +] + + +class ShortLinks(Item): + + SCOPE = 'short_links' + + URL = Item.prepare_url('short_links') + SINGLE_URL = Item.prepare_url('shortlink/modify/') + + def get(self, **kwargs): + """ + Args: + link (str) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'link': lambda x: Item.sanitize_string_value(x, 'link', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) From 64dc6674e123f9df62c0c38a503aac7a54616a34 Mon Sep 17 00:00:00 2001 From: Nikita Zyl Date: Thu, 10 Feb 2022 16:09:03 +0300 Subject: [PATCH 39/42] Add short_links --- admitad/items/short_links.py | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/admitad/items/short_links.py b/admitad/items/short_links.py index e797a89..37b5b27 100644 --- a/admitad/items/short_links.py +++ b/admitad/items/short_links.py @@ -13,23 +13,18 @@ class ShortLinks(Item): SCOPE = 'short_links' - URL = Item.prepare_url('short_links') - SINGLE_URL = Item.prepare_url('shortlink/modify/') + URL = Item.prepare_url('shortlink/modify/') - def get(self, **kwargs): + GET_FIELDS = { + 'link': lambda x: Item.sanitize_string_value(x, 'link'), + } + + def post(self, link, **kwargs): """ Args: link (str) """ - filtering = { - 'filter_by': kwargs, - 'available': { - 'link': lambda x: Item.sanitize_string_value(x, 'link', blank=True), - } - } - - return self.transport.get() \ - .set_pagination(**kwargs) \ - .set_filtering(filtering) \ - .request(url=self.URL) + data = Item.sanitize_fields(self.GET_FIELDS, link=link) + + return self.transport.post().set_data(data).request(url=self.URL) From d9c88eb61a74cd4442343fb48e3f77071270cc49 Mon Sep 17 00:00:00 2001 From: Nikita Zyl Date: Thu, 10 Feb 2022 16:13:21 +0300 Subject: [PATCH 40/42] SubID blank true --- 
admitad/items/deeplinks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/admitad/items/deeplinks.py b/admitad/items/deeplinks.py index ecaa49c..eb34eeb 100644 --- a/admitad/items/deeplinks.py +++ b/admitad/items/deeplinks.py @@ -17,7 +17,7 @@ class DeeplinksManage(Item): CREATE_FIELDS = { 'ulp': lambda x: Item.sanitize_string_array(x, 'ulp'), - 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30), + 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30, blank=True), # todo: subid[1-4] } From 9cb2c6447d2ee0c6386959cf61b76f630f767146 Mon Sep 17 00:00:00 2001 From: Nikita Zyl Date: Tue, 15 Feb 2022 13:46:28 +0300 Subject: [PATCH 41/42] Add readme and search param to coupon --- README.md | 9 +++++++++ admitad/items/coupons.py | 3 +++ admitad/items/deeplinks.py | 2 +- 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2c83f00..878c007 100644 --- a/README.md +++ b/README.md @@ -419,6 +419,15 @@ res = client.LandingsForWebsite.get(10, 22, limit=100) res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') ``` +### ShortLinks ### + +###### Get short link ###### + +```python +res = client.ShortLinks.post(link='https://ad.admitad.com/g/some/') +res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') +``` + ### Referrals ### ###### List of referrals ###### diff --git a/admitad/items/coupons.py b/admitad/items/coupons.py index 7f42f23..0590586 100644 --- a/admitad/items/coupons.py +++ b/admitad/items/coupons.py @@ -19,6 +19,7 @@ class CouponsBase(Item): 'campaign_category': lambda x: Item.sanitize_integer_array(x, 'campaign_category', blank=True), 'category': lambda x: Item.sanitize_integer_array(x, 'category', blank=True), 'type': lambda x: Item.sanitize_string_value(x, 'type', blank=True), + 'search': lambda x: Item.sanitize_string_value(x, 'search', blank=True), } @@ -39,6 +40,7 @@ def get(self, **kwargs): campaign (list of int) campaign_category (list of int) category (list of int) + search (str) type (str) limit (int) offset (int) @@ -95,6 +97,7 @@ def get(self, _id, **kwargs): campaign (list of int) campaign_category (list of int) category (list of int) + search (str) type (str) limit (int) offset (int) diff --git a/admitad/items/deeplinks.py b/admitad/items/deeplinks.py index eb34eeb..ecaa49c 100644 --- a/admitad/items/deeplinks.py +++ b/admitad/items/deeplinks.py @@ -17,7 +17,7 @@ class DeeplinksManage(Item): CREATE_FIELDS = { 'ulp': lambda x: Item.sanitize_string_array(x, 'ulp'), - 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30, blank=True), + 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30), # todo: subid[1-4] } From 2b10e470099a7e63236173e1fa1053413f293d1a Mon Sep 17 00:00:00 2001 From: Nikita Zyl Date: Tue, 15 Feb 2022 13:49:08 +0300 Subject: [PATCH 42/42] Fix readme --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 878c007..c058c74 100644 --- a/README.md +++ b/README.md @@ -425,7 +425,6 @@ res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', sub ```python res = client.ShortLinks.post(link='https://ad.admitad.com/g/some/') -res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') ``` ### Referrals ###
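Taken together, the last few patches add URL shortening and a free-text `search` filter for coupons. A hedged sketch of how the two could be combined — again assuming an authorized `client` with the relevant scopes, with the link, query and website id below as placeholders:

```python
# Sketch only: `client` is an assumed authorized admitad API client.

# Shorten an existing affiliate link (short_links scope).
short = client.ShortLinks.post(link='https://ad.admitad.com/g/some/')

# Filter coupons by free-text query using the new `search` parameter.
coupons = client.Coupons.get(search='free shipping', limit=10)

# The per-website variant accepts the same filter; 22 is a placeholder website id.
site_coupons = client.CouponsForWebsite.get(22, search='free shipping', limit=10)
```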