diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..e7bf449 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,5 @@ +language: python +python: + - 2.7.11 + - 3.5 +script: python setup.py test diff --git a/LICENSE.txt b/LICENSE.txt new file mode 100644 index 0000000..36f376e --- /dev/null +++ b/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) [2016] [Raman Barkholenka] + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md index e96a829..c058c74 100644 --- a/README.md +++ b/README.md @@ -1,378 +1,542 @@ -admitad-python-api +admitad-python-api [![Build Status](https://travis-ci.org/admitad/admitad-python-api.svg?branch=master)](https://travis-ci.org/admitad/admitad-python-api) ================== -A Python wrapper around the Admitad API +A Python wrapper around the [Admitad API](https://developers.admitad.com/en/) Install ------- -Dependencies - -* requests -* simplejson - -Install by cloning from the GitHub repo: - - $ git clone git://github.com/trezorg/admitad-python-api.git - $ cd admitad-python-api - $ python setup.py test - $ python setup.py build - $ python setup.py install - - or just - - $ cp -r admitad-python-api/pyadmitad path/to/destination - + pip install admitad Example ------- +```python +from admitad import api, items - from pyadmitad import api - - client_id = "" - client_secret = "" - username = "" - password = "" - scope = "private_data" +client_id = "[client_id]" +client_secret = "[client_secret]" +scope = ' '.join(set([items.Me.SCOPE])) - client = api.get_oauth_password_client( - client_id, - client_secret, - username, - password, - scope - ) +client = api.get_oauth_client_client( + client_id, + client_secret, + scope +) - or already having an access token - - client = api.get_oauth_client(access_token) - - info = client.Me.get() - - scope = "public_data" - - client = api.get_oauth_password_client( - client_id, - client_secret, - username, - password, - scope - ) - - print client.WebsiteTypes.get() - print client.WebsiteTypes.get(limit=2, offset=1) +print(client.Me.get()) +``` +Tests +----- + python setup.py test API Items ------------- ### Me ### - res = client.Me.get() +```python +res = client.Me.get() +``` ### Balance ### - res = client.Balance.get() +```python +res = client.Balance.get() +res = client.Balance.get(extended=True) +``` + + +### PaymentsSettings ### + +```python +res = client.PaymentsSettings.get() +res = 
client.PaymentsSettings.get(currency='USD') +``` ### Types of websites ### - res = client.WebsiteTypes.get() - res = client.WebsiteTypes.get(limit=2, offset=1) +```python +res = client.WebsiteTypes.get() +res = client.WebsiteTypes.get(limit=2, offset=1) +``` ### Regions of websites ### - res = client.WebsiteRegions.get() - res = client.WebsiteRegions.get(limit=2, offset=1) +```python +res = client.WebsiteRegions.get() +res = client.WebsiteRegions.get(limit=2, offset=1) +``` ### Languages ### - res = client.SystemLanguages.get() - res = client.SystemLanguages.get(limit=2, offset=1) - res = client.SystemLanguages.getOne(code='ru') +```python +res = client.SystemLanguages.get() +res = client.SystemLanguages.get(limit=2, offset=1) +res = client.SystemLanguages.getOne(code='ru') +``` ### Currencies ### - res = client.SystemCurrencies.get() - res = client.SystemCurrencies.get(limit=2, offset=1) +```python +res = client.SystemCurrencies.get() +res = client.SystemCurrencies.get(limit=2, offset=1) +``` ### Advertising services ### - res = client.AdvertiserServices.get() - res = client.AdvertiserServices.get(limit=2, offset=1) - res = client.AdvertiserServices.getOne(_id=2) - res = client.AdvertiserServices.getOne(1) - res = client.AdvertiserServices.getForKind(kind='website') - res = client.AdvertiserServices.getForKind('website') - res = client.AdvertiserServices.getForKindOne(_id=2, kind='website') - res = client.AdvertiserServices.getForKindOne(2, 'website') +```python +res = client.AdvertiserServices.get() +res = client.AdvertiserServices.get(limit=2, offset=1) +res = client.AdvertiserServices.getOne(1) +res = client.AdvertiserServices.getForKind(kind='website') +res = client.AdvertiserServices.getForKindOne(2, kind='website') +``` ### Categories of advertising campaigns ### - res = client.CampaignCategories.get() - res = client.CampaignCategories.get(limit=2, offset=1) - res = client.CampaignCategories.getOne(_id=2) - res = client.CampaignCategories.getOne(2) 
+```python +res = client.CampaignCategories.get() +res = client.CampaignCategories.get(campaign=10, language='en') +res = client.CampaignCategories.get(limit=2, offset=1) +res = client.CampaignCategories.getOne(2) +``` ### Coupons ## ###### List of coupons ###### - res = client.Coupons.get() - res = client.Coupons.get(order_by=date_start) - res = client.Coupons.get(order_by=-date_end) - res = client.Coupons.get(campaign=1, category=2) - res = client.Coupons.get(campaign=[1, 2], category=2) - res = client.Coupons.getOne(_id=2) - res = client.Coupons.getOne(2) +```python +res = client.Coupons.get() +res = client.Coupons.get(order_by=['date_start', '-name']) +res = client.Coupons.get(order_by='-date_end') +res = client.Coupons.get(campaign=1, category=2) +res = client.Coupons.get(campaign=[1, 2], category=[2, 3]) +res = client.Coupons.getOne(2) +``` ###### List of coupons for a website ###### - res = client.CouponsForWebsite.get(_id=2) - res = client.CouponsForWebsite.get(2) - res = client.CouponsForWebsite.get(2, order_by=date_start) - res = client.CouponsForWebsite.get(2, campaign=1, category=2) - res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) - res = client.CouponsForWebsite.getOne(_id=2, c_id=1) - res = client.CouponsForWebsite.getOne(2, 1) +```python +res = client.CouponsForWebsite.get(2) +res = client.CouponsForWebsite.get(2, order_by=date_start) +res = client.CouponsForWebsite.get(2, campaign=1, category=2) +res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) +res = client.CouponsForWebsite.getOne(2, 1) +``` + +###### List of coupons categories ###### +```python +res = client.CouponsCategories.get() +res = client.CouponsCategories.get(limit=10, offset=10) +res = client.CouponsCategories.getOne(2) +``` ### Websites ### ##### List of websites ##### - res = client.Websites.get() - res = client.Websites.get(status='new', campaign_status='active') - res = client.Websites.getOne(_id=2) - res = client.Websites.getOne(2) +```python 
+res = client.Websites.get(limit=10) +res = client.Websites.get(status='new', campaign_status='active') +res = client.Websites.getOne(2) +``` ##### Manage websites ##### ###### Create website ###### - res = client.WebsitesManage.create( - regions=['RU'], - atnd_hits='20', - atnd_visits='10', - name='website1', - language='ru', - site_url='http://site.com', - description='description', - categories=['1', '2'], - kind='website' - ) +```python +res = client.WebsitesManage.create( + name='website1', + kind='website', + language='ru', + adservice=2, + site_url='http://site.com', + description='description', + categories=[1, 2], + regions=['RU'], + atnd_hits=20, + atnd_visits=10, + mailing_targeting=False +) +``` ###### Update website ###### - res = client.WebsitesManage.update(50, name='test', language='de') +```python +res = client.WebsitesManage.update(50, name='test', language='de') +``` ###### Verify website ###### - res = client.WebsitesManage.verify(50) +```python +res = client.WebsitesManage.verify(50) +``` ###### Delete website ###### - res = client.WebsitesManage.delete(50) +```python +res = client.WebsitesManage.delete(50) +``` ### Statistics ### ###### Statistics by websites ###### - res = client.StatisticWebsites.get(website=1, campaign=1) - res = client.StatisticWebsites.get(subid="ADS778") - res = client.StatisticWebsites.get(limit=2) - res = client.StatisticWebsites.get(date_start='01.01.2013') +```python +res = client.StatisticWebsites.get(website=1, campaign=1) +res = client.StatisticWebsites.get(subid="ADS778") +res = client.StatisticWebsites.get(limit=2) +res = client.StatisticWebsites.get(date_start='01.01.2013') +``` ###### Statistics by campaigns ###### - res = client.StatisticCampaigns.get() - res = client.StatisticCampaigns.get(website=1, campaign=1) - res = client.StatisticCampaigns.get(subid="ADS778") - res = client.StatisticCampaigns.get(limit=2) - res = client.StatisticCampaigns.get(date_start='01.01.2013') +```python +res = 
client.StatisticCampaigns.get() +res = client.StatisticCampaigns.get(website=1, campaign=1) +res = client.StatisticCampaigns.get(subid="ADS778") +res = client.StatisticCampaigns.get(limit=2) +res = client.StatisticCampaigns.get(date_start='01.01.2013') +``` ###### Statistics by days ###### - res = client.StatisticDays.get() - res = client.StatisticDays.get(website=1, campaign=1) - res = client.StatisticDays.get(subid="ADS778") - res = client.StatisticDays.get(limit=2) - res = client.StatisticDays.get(date_start='01.01.2013') +```python +res = client.StatisticDays.get() +res = client.StatisticDays.get(website=1, campaign=1) +res = client.StatisticDays.get(subid="ADS778") +res = client.StatisticDays.get(limit=2) +res = client.StatisticDays.get(date_start='01.01.2013') +``` ###### Statistics by months ###### - res = client.StatisticMonths.get() - res = client.StatisticMonths.get(website=1, campaign=1) - res = client.StatisticMonths.get(subid="ADS778") - res = client.StatisticMonths.get(limit=2) - res = client.StatisticMonths.get(date_start='01.01.2013') +```python +res = client.StatisticMonths.get() +res = client.StatisticMonths.get(website=1, campaign=1) +res = client.StatisticMonths.get(subid="ADS778") +res = client.StatisticMonths.get(limit=2) +res = client.StatisticMonths.get(date_start='01.01.2013') +``` ###### Statistics by actions ###### - res = client.StatisticActions.get() - res = client.StatisticActions.get(date_start='01.01.2013') - res = client.StatisticActions.get(website=1, campaign=1) - res = client.StatisticActions.get(subid="ADS778") - res = client.StatisticActions.get(subid2="ADS778") - res = client.StatisticActions.get(limit=2) +```python +res = client.StatisticActions.get() +res = client.StatisticActions.get(date_start='01.01.2013') +res = client.StatisticActions.get(website=1, campaign=1) +res = client.StatisticActions.get(subid="ADS778") +res = client.StatisticActions.get(subid2="ADS778") +res = client.StatisticActions.get(limit=2) +``` ###### 
Statistics by sub-ids ###### - res = client.StatisticSubIds.get() - res = client.StatisticSubIds.get(date_start='01.01.2013') - res = client.StatisticSubIds.get(subid="ADS778") - res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) - res = client.StatisticSubIds.get(limit=2) +```python +res = client.StatisticSubIds.get() +res = client.StatisticSubIds.get(date_start='01.01.2013') +res = client.StatisticSubIds.get(subid="ADS778") +res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) +res = client.StatisticSubIds.get(limit=2) +``` ###### Statistics by sources ###### - res = client.StatisticSources.get() - res = client.StatisticSources.get(date_start='01.01.2013') - res = client.StatisticSources.get(limit=2) +```python +res = client.StatisticSources.get() +res = client.StatisticSources.get(date_start='01.01.2013') +res = client.StatisticSources.get(limit=2) +``` ###### Statistics by keywords ###### - res = client.StatisticKeywords.get() - res = client.StatisticKeywords.get(date_start='01.01.2013') - res = client.StatisticKeywords.get(limit=2) +```python +res = client.StatisticKeywords.get() +res = client.StatisticKeywords.get(date_start='01.01.2013') +res = client.StatisticKeywords.get(limit=2) +``` ### Referrals ### - res = client.Referrals.get() - res = client.Referrals.get(limit=2) - res = client.Referrals.getOne(_id=2) - res = client.Referrals.getOne(2) +```python +res = client.Referrals.get() +res = client.Referrals.get(limit=2) +res = client.Referrals.getOne(2) +``` ### Banners ### ###### List of banners ###### - res = client.Banners.get(_id=2) - res = client.Banners.get(2) - res = client.Banners.get(2, limit=2) - +```python +res = client.Banners.get(2) +res = client.Banners.get(2, mobile_content=False, limit=2) +``` ###### List of banners for a website ###### - res = client.BannersForWebsite.get(_id=2, w_id=3) - res = client.BannersForWebsite.get(2, 3) - res = client.BannersForWebsite.get(2, 3, limit=5) +```python +res = 
client.BannersForWebsite.get(_id=2, w_id=3) +res = client.BannersForWebsite.get(2, 3) +res = client.BannersForWebsite.get(2, 3, uri_scheme='https', limit=5) +``` ### Campaigns ### ###### List of campaigns ###### - res = client.Campaigns.get() - res = client.Campaigns.get(limit=2) - res = client.Campaigns.getOne(2) +```python +res = client.Campaigns.get() +res = client.Campaigns.get(limit=2) +res = client.Campaigns.getOne(2) +``` ###### List of campaigns for a website ###### - res = client.CampaignsForWebsite.get(22) - res = client.CampaignsForWebsite.get(limit=2) - res = client.CampaignsForWebsite.getOne(6, 22) +```python +res = client.CampaignsForWebsite.get(22) +res = client.CampaignsForWebsite.get(limit=2) +res = client.CampaignsForWebsite.getOne(6, 22) +``` ###### Manage campaigns ###### - res = client.CampaignsManage.connect(6, 22) - res = client.CampaignsManage.connect(c_id=6, w_id=22) - res = client.CampaignsManage.disconnect(6, 22) - res = client.CampaignsManage.disconnect(c_id=6, w_id=22) +```python +res = client.CampaignsManage.connect(6, 22) +res = client.CampaignsManage.connect(c_id=6, w_id=22) +res = client.CampaignsManage.disconnect(6, 22) +res = client.CampaignsManage.disconnect(c_id=6, w_id=22) +``` -### Products ### +### Payments ### -###### Categories of products ###### - res = client.ProductCategories.get() - res = client.ProductCategories.get(limit=1, order_by=-name) - res = client.ProductCategories.getOne(2) +###### List of payment ###### +```python +res = client.Payments.get() +res = client.Payments.get(limit=2, has_statement=True) +res = client.Payments.getOne(2) +``` -###### Vendors of products ###### +###### Payments statement ###### - res = client.ProductVendors.get() - res = client.ProductVendors.get(limit=1, order_by=-name) - res = client.ProductVendors.getOne(2) +```python +res = client.PaymentsStatement.get(12) +res = client.PaymentsStatement.get(12, detailed=True) +``` +###### Manage payments ###### -###### Campaigns with products 
###### +```python +res = client.PaymentsManage.create('USD') +res = client.PaymentsManage.confirm(71) +res = client.PaymentsManage.delete(71) +``` - res = client.ProductCampaigns.get(22) - res = client.ProductCampaigns.get(22, limit=1, order_by=-name) - res = client.ProductCampaigns.getOne(22, 6) +### Broken links ### +###### List of broken links ###### -###### Products for website ###### +```python +res = client.BrokenLinks.get() +res = client.BrokenLinks.get(website=[10, 20], date_start='01.01.2010') +res = client.BrokenLinks.getOne(10) +``` - res = client.Products.get(22) - res = client.Products.get(22, limit=1) - res = client.Products.get(22, limit=1, order_by=-price) - res = client.Products.get(22, price_from=1000) - res = client.ProductCampaigns.getOne(22, 2) +###### Manage broken links ###### +```python +res = client.ManageBrokenLinks.resolve(10) +res = client.ManageBrokenLinks.resolve([10, 11, 12]) +``` ### Announcements ### +###### List of annouuncements ###### -###### List of announcements ###### +```python +res = client.Announcements.get() +res = client.Announcements.getOne(10) +``` - res = client.Announcements.get() - res = client.Announcements.get(limit=1, offset=2) - res = client.Announcements.getOne(2) +### News ### -###### Manage announcements ###### +###### List of news ###### - res = client.AnnouncementsManage.delete(12) +```python +res = client.News.get() +res = client.News.get(limit=10, offset=20) +res = client.News.getOne(10) +``` +### Links validator ### -### Payments ### +###### Validate link ###### +```python +res = client.LinksValidator.get('https://admitad.com/some_url/') +``` -###### List of payment ###### +### Landings ### - res = client.Payments.get() - res = client.Payments.get(limit=2) - res = client.Payments.getOne(2) +###### List of landings ###### -###### Manage payments ###### +```python +res = client.Landings.get(10) +res = client.Landings.get(10, limit=100) +``` + +###### List of landings for website ###### - res = 
client.PaymentsManage.create('USD') - res = client.PaymentsManage.confirm(71) - res = client.PaymentsManage.delete(71) +```python +res = client.LandingsForWebsite.get(10, 22) +res = client.LandingsForWebsite.get(10, 22, limit=100) +``` +### Deeplinks ### -### Money transfers ### +###### Create deeplink ###### +```python +res = client.DeeplinksManage.create(22, 10, ulp='https://admitad.com/some/', subid='AS32djkd31') +``` -###### List of money transfers ###### +### ShortLinks ### - res = client.MoneyTransfers.get() - res = client.MoneyTransfers.get(limit=2) - res = client.MoneyTransfers.getOne(2) - res = client.MoneyTransfers.get(sender='sender') - res = client.MoneyTransfers.get(currency='USD') +###### Get short link ###### -###### Manage money transfers ###### +```python +res = client.ShortLinks.post(link='https://ad.admitad.com/g/some/') +``` - res = client.MoneyTransfersManage.create( - currency='USD', - recipient='recipient', - comment='comment', - sum=10) +### Referrals ### +###### List of referrals ###### + +```python +res = client.Referrals.get() +res = client.Referrals.get(date_start='01.01.2010', date_end=datetime.today()) +res = client.Referrals.getOne(181) +``` + +### Optcodes ### + +###### List of opt-codes ###### + +```python +res = client.OptCodes.get() +res = client.OptCodes.get(campaign=100, order_by=['method', 'desc_mode']) +res = client.OptCodes.getOne(11) +``` + +###### Offer status opt-codes manager ###### + +```python +res = client.OfferStatusOptCodesManager.create( + website=10, campaign=100, desc_mode=0, method=1, + url='https://admitad.com/foobarbaz/' +) +res = client.OfferStatusOptCodesManager.update( + desc_mode=1, method=1 +) +``` + +###### Action opt-codes manager ###### + +```python +res = client.ActionOptCodesManager.create( + website=10, campaign=100, desc_mode=0, method=1, + url='https://admitad.com/foobarbaz/', + action_type=0, status=1 +) +res = client.ActionOptCodesManager.update( + desc_mode=1, method=1, action_type=1, status=2 +) 
+``` + +### Lost orders ### + +###### List of lost orders ###### + +```python +res = client.LostOrders.get() +res = client.LostOrders.get(limit=20, offset=0, appeal_status='processing') +res = client.LostOrders.getOne(76) +``` + +###### Lost orders manager ###### + +```python +res = client.LostOrdersManager.create( + attachments=['/home/user/f.png', '/home/user/s.png'], + campaign=100, website=10, + order_id='039NRUHFJEW', order_date='12.08.2016', order_price=345.77, + comment='some comment' +) +res = client.LostOrdersManager.update(77, appeal_status='resolved') +res = client.LostOrdersManager.delete(77) +``` + +### Retag ### + +###### List of retag ###### + +```python +res = client.Retag.get() +res = client.Retag.get(website=10, active=False, limit=50) +res = client.Retag.getOne(54) +res = client.Retag.getLevelsForWebsite(10) +res = client.Retag.getLevelsForCampaign(100) +``` + +###### Retag manager ###### + +```python +res = client.RetagManager.create( + website=10, level=22, active=False, + script='some js script', comment='some comment' +) +res = client.RetagManager.update(16, level=10, active=True) +res = client.RetagManager.delete(88) +``` + +### Tickets ### + +###### List of tickets ###### + +```python +res = client.Tickets.get() +res = client.Tickets.get(date_start='01.01.2016', status=0) +res = client.Tickets.getOne(50) +``` + +###### Ticket manager ###### + +```python +res = client.TicketsManager.create( + subject='subject', text='some text', + campaign=100, category=27, priority=0, +) +res = client.TicketsManager.comment(12, text='some comment') +``` Notes ------ @@ -380,5 +544,7 @@ Notes It is possible to override the default response handler by passing handler as a keyword argument to a client function call. 
For example: - func = lambda x: (x, x) - result = client.Me.get(handler=func) +```python +func = lambda x: (x, x) +result = client.Me.get(handler=func) +``` diff --git a/pyadmitad/__init__.py b/admitad/__init__.py similarity index 100% rename from pyadmitad/__init__.py rename to admitad/__init__.py diff --git a/admitad/api.py b/admitad/api.py new file mode 100644 index 0000000..fd09bd9 --- /dev/null +++ b/admitad/api.py @@ -0,0 +1,26 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad import client, transport + + +def get_oauth_client_token(access_token, user_agent=None, debug=False): + """ + Creates a client using an access token. + + """ + http_transport = transport.HttpTransport(access_token, user_agent=user_agent, debug=debug) + return client.Client(http_transport) + + +def get_oauth_client_client(client_id, client_secret, scopes, user_agent=None, debug=False): + """ + Creates a client using a client_id and client_secret. + + """ + auth = transport.oauth_client_authorization({ + 'client_id': client_id, + 'client_secret': client_secret, + 'scopes': scopes + }) + return get_oauth_client_token(auth['access_token'], user_agent=user_agent, debug=debug) diff --git a/pyadmitad/client.py b/admitad/client.py similarity index 54% rename from pyadmitad/client.py rename to admitad/client.py index c2c2106..8253cda 100644 --- a/pyadmitad/client.py +++ b/admitad/client.py @@ -1,13 +1,7 @@ -from pyadmitad import items +# coding: utf-8 +from __future__ import unicode_literals - -class FailedRequest(Exception): - - def __init__(self, error): - self.error = error - - def __str__(self): - return repr(self.error) +from admitad import items class Client(object): diff --git a/admitad/constants.py b/admitad/constants.py new file mode 100644 index 0000000..0953fd7 --- /dev/null +++ b/admitad/constants.py @@ -0,0 +1,23 @@ +# coding: utf-8 +from __future__ import unicode_literals + +# API date-format +DATE_FORMAT = '%d.%m.%Y' +LONG_DATE_FORMAT = '%d.%m.%Y 
%H:%M:%S' + +SUPPORTED_LANGUAGES = ('ru', 'en', 'de', 'pl', 'es', 'tr') + +# default values +DEFAULT_REQUEST_TIMEOUT = 60 +DEFAULT_LANGUAGE = 'ru' +DEFAULT_PAGINATION_LIMIT = 20 +DEFAULT_PAGINATION_OFFSET = 0 + +# constants +MAX_PAGINATION_LIMIT = 500 +MAX_SUB_ID_LENGTH = 250 + +# urls +BASE_URL = 'https://api.admitad.com/' +AUTHORIZE_URL = '%s%s' % (BASE_URL, 'authorize/') +TOKEN_URL = '%s%s' % (BASE_URL, 'token/') diff --git a/pyadmitad/exceptions.py b/admitad/exceptions.py similarity index 81% rename from pyadmitad/exceptions.py rename to admitad/exceptions.py index b1de13c..91e4c14 100644 --- a/pyadmitad/exceptions.py +++ b/admitad/exceptions.py @@ -1,8 +1,12 @@ +# coding: utf-8 +from __future__ import unicode_literals class HttpException(Exception): def __init__(self, status, message, content): + super(HttpException, self).__init__() + self.status = status self.message = message self.content = content @@ -19,6 +23,8 @@ def __repr__(self): class ConnectionException(Exception): def __init__(self, content): + super(ConnectionException, self).__init__() + self.content = content def __str__(self): @@ -31,6 +37,8 @@ def __repr__(self): class JsonException(Exception): def __init__(self, content): + super(JsonException, self).__init__() + self.content = content def __str__(self): @@ -43,6 +51,8 @@ def __repr__(self): class ApiException(Exception): def __init__(self, content): + super(ApiException, self).__init__() + self.content = content def __str__(self): diff --git a/admitad/items/__init__.py b/admitad/items/__init__.py new file mode 100644 index 0000000..ce089d6 --- /dev/null +++ b/admitad/items/__init__.py @@ -0,0 +1,20 @@ +from admitad.items.me import * +from admitad.items.websites import * +from admitad.items.auxiliary import * +from admitad.items.announcements import * +from admitad.items.news import * +from admitad.items.links import * +from admitad.items.landings import * +from admitad.items.deeplinks import * +from admitad.items.referrals import * +from 
admitad.items.payments import * +from admitad.items.coupons import * +from admitad.items.statistics import * +from admitad.items.banners import * +from admitad.items.campaigns import * +from admitad.items.optcodes import * +from admitad.items.lost_orders import * +from admitad.items.retag import * +from admitad.items.broken_links import * +from admitad.items.tickets import * +from admitad.items.short_links import * diff --git a/admitad/items/announcements.py b/admitad/items/announcements.py new file mode 100644 index 0000000..356716c --- /dev/null +++ b/admitad/items/announcements.py @@ -0,0 +1,61 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Announcements', +] + + +class Announcements(Item): + """ + List of announcements + + """ + + SCOPE = 'announcements' + + URL = Item.prepare_url('announcements') + SINGLE_URL = Item.prepare_url('announcements/%(announcement_id)s') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + language (str) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + language (str) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + request_data = { + 'url': self.SINGLE_URL, + 'announcement_id': Item.sanitize_id(_id) + } + + return self.transport.get().set_filtering(filtering).request(**request_data) diff --git a/admitad/items/auxiliary.py b/admitad/items/auxiliary.py new file mode 100644 index 0000000..08fc9a5 --- /dev/null +++ b/admitad/items/auxiliary.py @@ -0,0 +1,236 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + 
+__all__ = ( + 'WebsiteTypes', + 'WebsiteRegions', + 'SystemLanguages', + 'SystemCurrencies', + 'AdvertiserServices', + 'CampaignCategories', +) + + +class WebsiteTypes(Item): + """ + List of websites types + + """ + + SCOPE = 'public_data' + + URL = Item.prepare_url('websites/kinds') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + + """ + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + +class WebsiteRegions(Item): + """ + List of websites regions + + """ + + SCOPE = 'public_data' + + URL = Item.prepare_url('websites/regions') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + + """ + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + +class SystemLanguages(Item): + """ + List of system languages + + """ + + SCOPE = 'public_data' + + URL = Item.prepare_url('languages') + SINGLE_URL = Item.prepare_url('languages/%(code)s') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + + """ + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getOne(self, code='ru'): + """ + Args: + code (str) + + """ + + request_data = { + 'url': self.SINGLE_URL, + 'code': Item.sanitize_string_value(code, 'code', 2, 2, False) + } + + return self.transport.get().request(**request_data) + + +class SystemCurrencies(Item): + """ + List of system currencies + + """ + + SCOPE = 'public_data' + + URL = Item.prepare_url('currencies') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + + """ + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + +class AdvertiserServices(Item): + """ + List of advertiser services + + """ + + SCOPE = 'public_data' + + URL = Item.prepare_url('adservices') + SINGLE_URL = Item.prepare_url('adservices/%(id)s') + KIND_URL = Item.prepare_url('adservices/kind/%(kind)s') + KIND_SINGLE_URL = Item.prepare_url('adservices/%(id)s/kind/%(kind)s') + + def get(self, 
**kwargs): + """ + Args: + limit (int) + offset (int) + + """ + + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + + """ + data = { + 'url': self.SINGLE_URL, + 'id': Item.sanitize_id(_id), + } + + return self.transport.get().request(**data) + + def getForKind(self, kind=None, **kwargs): + """ + Args: + kind (str) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.KIND_URL, + 'kind': self.sanitize_non_blank_value(kind, 'kind'), + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) + + def getForKindOne(self, _id, kind, **kwargs): + """ + Args: + _id (int) + kind (str) + + """ + request_data = { + 'url': self.KIND_SINGLE_URL, + 'id': self.sanitize_id(_id), + 'kind': self.sanitize_non_blank_value(kind, 'kind'), + } + + return self.transport.get().request(**request_data) + + +class CampaignCategories(Item): + """ + List of campaigns categories + + """ + + SCOPE = 'public_data' + + ORDERING = ('name',) + + URL = Item.prepare_url('categories') + SINGLE_URL = Item.prepare_url('categories/%(id)s') + + def get(self, **kwargs): + """ + Args: + campaign (list of int) + language (str) + order_by (str) + limit (int) + offset (int) + + """ + ordering = { + 'order_by': kwargs.get('order_by', None), + 'available': self.ORDERING + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', True), + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) diff --git a/admitad/items/banners.py 
b/admitad/items/banners.py new file mode 100644 index 0000000..dc853fe --- /dev/null +++ b/admitad/items/banners.py @@ -0,0 +1,95 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Banners', + 'BannersForWebsite', +] + + +class Banners(Item): + """ + List of banners + + """ + + SCOPE = 'banners' + + URL = Item.prepare_url('banners/%(campaign_id)s') + + def get(self, _id, **kwargs): + """ + Here _id is an id of advertising campaign + + Args: + _id (int) + mobile_content (bool) + limit (int) + offset(int) + + """ + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'mobile_content': lambda x: Item.sanitize_bool_value(x, blank=True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(**request_data) + + +class BannersForWebsite(Item): + """ + List of banners for the website + + """ + + SCOPE = 'banners_for_website' + + URL = Item.prepare_url('banners/%(campaign_id)s/website/%(website_id)s') + + def get(self, _id, w_id, **kwargs): + """ + Here _id is an id of advertising campaign and + w_id is a id of website + + Args: + _id (int) + w_id (int) + mobile_content (bool) + landing (int) + uri_scheme (str) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(_id), + 'website_id': Item.sanitize_id(w_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'mobile_content': lambda x: Item.sanitize_bool_value(x, blank=True), + 'landing': lambda x: Item.sanitize_integer_value(x, 'landing', blank=True), + 'uri_scheme': lambda x: Item.sanitize_string_value(x, 'uri_scheme', blank=True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(**request_data) diff --git a/admitad/items/base.py b/admitad/items/base.py new file mode 100644 index 
0000000..6d8f2ec --- /dev/null +++ b/admitad/items/base.py @@ -0,0 +1,166 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from future import standard_library +standard_library.install_aliases() + +from future.builtins import int, str +from datetime import datetime, date +from urllib.parse import urljoin + +from admitad.constants import BASE_URL, DATE_FORMAT, LONG_DATE_FORMAT + + +class Item(object): + + def __init__(self, transport): + self.transport = transport + self.transport.clean_data() + + @staticmethod + def sanitize_fields(fields, **kwargs): + return {key: func(kwargs.get(key, None)) for (key, func) in fields.items()} + + @staticmethod + def sanitize_id(_id, name='_id'): + if _id == 0: + raise ValueError('Invalid value for `id`: %s' % (_id)) + return Item.sanitize_integer_value(_id, name, False) + + @staticmethod + def sanitize_non_blank_value(value, name): + if value in [[], {}, (), '', None]: + raise ValueError("Invalid non-blank value '%s': %s" % (name, value)) + return value + + @staticmethod + def sanitize_string_value(value, name, max_length=None, min_length=None, blank=False): + if not value: + if not blank: + raise ValueError("Invalid string value '%s': %s. Cannot be blank." % + (name, value)) + return value + if max_length and len(value) > max_length: + raise ValueError("Invalid string value '%s': %s. Max length: %s" % + (name, value, max_length)) + if min_length and len(value) < min_length: + raise ValueError("Invalid string value '%s': %s. 
Min length: %s" % + (name, value, min_length)) + return value + + @staticmethod + def sanitize_integer_value(value, name, blank=False): + if value is None: + if not blank: + raise ValueError("Blank integer value '%s': %s" % (name, value)) + return value + if isinstance(value, int): + return value + elif isinstance(value, str) and value.isdigit(): + return value + raise ValueError("Invalid integer value '%s': %s" % (name, value)) + + @staticmethod + def sanitize_float_value(value, name, blank=False): + if value is None: + if not blank: + raise ValueError("Blank float value '%s': %s" % (name, value)) + return value + if isinstance(value, (float, int)): + return value + elif isinstance(value, str): + try: + float(value) + return value + except ValueError: + pass + raise ValueError("Invalid float value '%s': %s" % (name, value)) + + @staticmethod + def sanitize_integer_array(values, name, blank=False): + if not values: + if not blank: + raise ValueError("Blank integer values '%s': %s" % (name, values)) + return values + if not isinstance(values, (list, tuple, set)): + values = [values] + return [Item.sanitize_integer_value(x, name, blank=blank) + for x in values] + + @staticmethod + def sanitize_string_array(values, name, max_length=None, min_length=None, blank=False): + if not values: + if not blank: + raise ValueError("Blank string values '%s': %s" % (name, values)) + return values + if not isinstance(values, (list, tuple, set)): + values = [values] + return [Item.sanitize_string_value(x, name, max_length=max_length, min_length=min_length, blank=blank) + for x in values] + + @staticmethod + def sanitize_bool_value(value, name='', blank=False): + if value is None: + if not blank: + raise ValueError("Blank bool value '%s': %s" % (name, value)) + return False + return str(bool(value)).lower() + + @staticmethod + def sanitize_bool_integer_value(value, name='', blank=False): + if value is None: + if not blank: + raise ValueError("Blank bool value '%s': %s" % (name, 
value)) + return False + return 1 if value else 0 + + @staticmethod + def sanitize_currency_value(value, blank=True): + if not value: + if not blank: + raise ValueError("Blank currency value: %s" % value) + return value + if not len(value) == 3: + raise ValueError("Invalid currency value: %s" % value) + return value.upper() + + @staticmethod + def sanitize_date(value, name, blank=False): + if value is None: + if not blank: + raise ValueError("Blank date value for '%s'" % name) + return None + if isinstance(value, datetime): + return value.date().strftime(DATE_FORMAT) + elif isinstance(value, date): + return value.strftime(DATE_FORMAT) + elif isinstance(value, str): + try: + datetime.strptime(value, DATE_FORMAT) + except ValueError: + raise ValueError("Invalid date: %s" % value) + return value + raise ValueError("Invalid date: %s" % value) + + @staticmethod + def sanitize_long_date(value, name, blank=False): + if value is None: + if not blank: + raise ValueError("Blank date value for '%s'" % name) + return None + if isinstance(value, datetime): + return value.strftime(LONG_DATE_FORMAT) + elif isinstance(value, str): + try: + datetime.strptime(value, LONG_DATE_FORMAT) + except ValueError: + raise ValueError("Invalid date: %s" % value) + return value + raise ValueError("Invalid date: %s" % value) + + @staticmethod + def prepare_url(path): + url = urljoin(BASE_URL, path) + if not url.endswith('/'): + url += '/' + return url diff --git a/admitad/items/broken_links.py b/admitad/items/broken_links.py new file mode 100644 index 0000000..750aaba --- /dev/null +++ b/admitad/items/broken_links.py @@ -0,0 +1,88 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'BrokenLinks', + 'ManageBrokenLinks' +] + + +class BrokenLinks(Item): + + SCOPE = 'broken_links' + + URL = Item.prepare_url('broken_links') + SINGLE_URL = Item.prepare_url('broken_links/%(broken_link_id)s') + + def get(self, **kwargs): + """ + Args: + 
website (list of int) + campaign (list of int) + search (str) + reason (int) + date_start (date) + date_end (date) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'website': lambda x: Item.sanitize_integer_array(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', blank=True), + 'search': lambda x: Item.sanitize_string_value(x, 'search', blank=True), + 'reason': lambda x: Item.sanitize_integer_value(x, 'reason', blank=True), + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, broken_link_id): + """ + Args: + broken_link_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'broken_link_id': Item.sanitize_id(broken_link_id) + } + + return self.transport.get().request(**request_data) + + +class ManageBrokenLinks(Item): + + SCOPE = 'manage_broken_links' + + RESOLVE_URL = Item.prepare_url('broken_links/resolve') + + def resolve(self, broken_link_ids): + """ + Args: + broken_links_ids (list of int) + + """ + + filtering = { + 'filter_by': { + 'link_id': broken_link_ids + }, + 'available': { + 'link_id': lambda x: Item.sanitize_integer_array(x, 'link_id', blank=True) + } + } + + return self.transport.post() \ + .set_filtering(filtering) \ + .request(url=self.RESOLVE_URL) diff --git a/admitad/items/campaigns.py b/admitad/items/campaigns.py new file mode 100644 index 0000000..9633c6c --- /dev/null +++ b/admitad/items/campaigns.py @@ -0,0 +1,156 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Campaigns', + 'CampaignsForWebsite', + 'CampaignsManage', +] + + +class Campaigns(Item): + """ + List of advertising campaigns + + """ + + SCOPE = 'advcampaigns' + + URL = 
Item.prepare_url('advcampaigns') + SINGLE_URL = Item.prepare_url('advcampaigns/%(campaign_id)s') + + def get(self, **kwargs): + """ + Args: + website (int) + has_tool (list of str) + limit (int) + offset (int) + language (str) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'has_tool': lambda x: Item.sanitize_string_array(x, 'has_tool', blank=True), + 'language': lambda x: Item.sanitize_string_value(x, 'language', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Here _id is an a campaign id + + Args: + _id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'campaign_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) + + +class CampaignsForWebsite(Item): + """ + List of advertising campaigns for a website + + """ + + SCOPE = 'advcampaigns_for_website' + + URL = Item.prepare_url('advcampaigns/website/%(website_id)s') + SINGLE_URL = Item.prepare_url('advcampaigns/%(campaign_id)s/website/%(website_id)s') + + def get(self, _id, **kwargs): + """ + Here _id is a website id + + Args: + _id (int) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) + + def getOne(self, _id, c_id, **kwargs): + """ + Here _id is a website id and c_id is a campaign id + + Args: + _id (int) + c_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'website_id': Item.sanitize_id(_id), + 'campaign_id': Item.sanitize_id(c_id) + } + + return self.transport.get().request(**request_data) + + +class CampaignsManage(Item): + """ + Manage an advertising campaign + + """ + + SCOPE = 'manage_advcampaigns' + + CONNECT_URL = 
Item.prepare_url('advcampaigns/%(campaign_id)s/attach/%(website_id)s') + DISCONNECT_URL = Item.prepare_url('advcampaigns/%(campaign_id)s/detach/%(website_id)s') + + def connect(self, c_id, w_id, **kwargs): + """ + Connect an advertising campaign for a website + Here w_id is a website id and c_id is a campaign id + + Args: + c_id (int) + w_id (int) + + """ + request_data = { + 'url': self.CONNECT_URL, + 'campaign_id': Item.sanitize_id(c_id), + 'website_id': Item.sanitize_id(w_id) + } + + return self.transport.post().request(**request_data) + + def disconnect(self, c_id, w_id, **kwargs): + """ + Disconnect an advertising campaign from a website + Here w_id is a website id and c_id is a campaign id + + Args: + c_id (int) + w_id (int) + + """ + request_data = { + 'url': self.DISCONNECT_URL, + 'campaign_id': Item.sanitize_id(c_id), + 'website_id': Item.sanitize_id(w_id) + } + + return self.transport.post().request(**request_data) diff --git a/admitad/items/coupons.py b/admitad/items/coupons.py new file mode 100644 index 0000000..0590586 --- /dev/null +++ b/admitad/items/coupons.py @@ -0,0 +1,173 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Coupons', + 'CouponsForWebsite', + 'CouponsCategories', +] + + +class CouponsBase(Item): + + ORDERING = ('name', 'date_start', 'date_end', 'rating',) + FILTERING = { + 'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', blank=True), + 'campaign_category': lambda x: Item.sanitize_integer_array(x, 'campaign_category', blank=True), + 'category': lambda x: Item.sanitize_integer_array(x, 'category', blank=True), + 'type': lambda x: Item.sanitize_string_value(x, 'type', blank=True), + 'search': lambda x: Item.sanitize_string_value(x, 'search', blank=True), + } + + +class Coupons(CouponsBase): + """ + List of coupons + + """ + + SCOPE = 'coupons' + + URL = Item.prepare_url('coupons') + SINGLE_URL = Item.prepare_url('coupons/%(coupon_id)s') + + def 
get(self, **kwargs): + """ + Args: + campaign (list of int) + campaign_category (list of int) + category (list of int) + search (str) + type (str) + limit (int) + offset (int) + order_by (str) + + """ + filtering = { + 'filter_by': kwargs, + 'available': self.FILTERING + } + + ordering = { + 'order_by': kwargs.get('order_by', None), + 'available': self.ORDERING + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'coupon_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) + + +class CouponsForWebsite(CouponsBase): + """ + List of the website coupons + + """ + + SCOPE = 'coupons_for_website' + + URL = Item.prepare_url('coupons/website/%(website_id)s') + SINGLE_URL = Item.prepare_url('coupons/%(campaign_id)s/website/%(website_id)s') + + def get(self, _id, **kwargs): + """ + Here _id is a websites id + + Args: + _id (int) + campaign (list of int) + campaign_category (list of int) + category (list of int) + search (str) + type (str) + limit (int) + offset (int) + order_by (str) + + """ + request_data = { + 'url': self.URL, + 'website_id': Item.sanitize_id(_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': self.FILTERING + } + + ordering = { + 'order_by': kwargs.get('order_by', None), + 'available': self.ORDERING + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(**request_data) + + def getOne(self, _id, c_id, **kwargs): + """ + Here _id is a websites id and c_id is a coupon id + + Args: + _id (int) + c_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'website_id': Item.sanitize_id(_id), + 'campaign_id': Item.sanitize_id(c_id) + } + + return self.transport.get().request(**request_data) + + +class 
CouponsCategories(CouponsBase): + + SCOPE = 'public_data' + + URL = Item.prepare_url('coupons/categories') + SINGLE_URL = Item.prepare_url('coupons/categories/%(coupon_category_id)s') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + + """ + return self.transport.get().set_pagination(**kwargs).request(url=self.URL) + + def getOne(self, coupon_category_id): + """ + Args: + coupon_category_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'coupon_category_id': Item.sanitize_id(coupon_category_id) + } + + return self.transport.get().request(**request_data) diff --git a/admitad/items/deeplinks.py b/admitad/items/deeplinks.py new file mode 100644 index 0000000..ecaa49c --- /dev/null +++ b/admitad/items/deeplinks.py @@ -0,0 +1,41 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'DeeplinksManage', +] + + +class DeeplinksManage(Item): + + SCOPE = 'deeplink_generator' + + CREATE_URL = Item.prepare_url('deeplink/%(website_id)s/advcampaign/%(campaign_id)s') + + CREATE_FIELDS = { + 'ulp': lambda x: Item.sanitize_string_array(x, 'ulp'), + 'subid': lambda x: Item.sanitize_string_value(x, 'subid', max_length=30), + # todo: subid[1-4] + } + + def create(self, website_id, campaign_id, **kwargs): + """ + Args: + website_id (int) + campaign_id (int) + ulp (list of str) + subid (str) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + request_data = { + 'url': self.CREATE_URL, + 'website_id': Item.sanitize_id(website_id), + 'campaign_id': Item.sanitize_id(campaign_id), + } + + return self.transport.get().set_data(data).request(**request_data) diff --git a/admitad/items/landings.py b/admitad/items/landings.py new file mode 100644 index 0000000..27c699f --- /dev/null +++ b/admitad/items/landings.py @@ -0,0 +1,56 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Landings', + 
'LandingsForWebsite', +] + + +class Landings(Item): + + SCOPE = 'landings' + + URL = Item.prepare_url('landings/%(campaign_id)s') + + def get(self, campaign_id, **kwargs): + """ + Args: + campaign_id (int) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(campaign_id), + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) + + +class LandingsForWebsite(Item): + + SCOPE = 'landings' + + URL = Item.prepare_url('landings/%(campaign_id)s/website/%(website_id)s') + + def get(self, campaign_id, website_id, **kwargs): + """ + Args: + campaign_id (int) + website_id (int) + limit (int) + offset (int) + + """ + request_data = { + 'url': self.URL, + 'campaign_id': Item.sanitize_id(campaign_id), + 'website_id': Item.sanitize_id(website_id), + } + + return self.transport.get().set_pagination(**kwargs).request(**request_data) diff --git a/admitad/items/links.py b/admitad/items/links.py new file mode 100644 index 0000000..de58f96 --- /dev/null +++ b/admitad/items/links.py @@ -0,0 +1,25 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +class LinksValidator(Item): + + SCOPE = 'validate_links' + + URL = Item.prepare_url('validate_links') + + GET_FIELDS = { + 'link': lambda x: Item.sanitize_string_value(x, 'link'), + } + + def get(self, link, **kwargs): + """ + Args: + link (str) + + """ + data = Item.sanitize_fields(self.GET_FIELDS, link=link) + + return self.transport.get().set_data(data).request(url=self.URL) diff --git a/admitad/items/lost_orders.py b/admitad/items/lost_orders.py new file mode 100644 index 0000000..384d2b4 --- /dev/null +++ b/admitad/items/lost_orders.py @@ -0,0 +1,131 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'LostOrders', + 'LostOrdersManager', +] + + +class LostOrders(Item): + + SCOPE = 'lost_orders' + + URL = Item.prepare_url('lost_orders') + 
SINGLE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s') + + def get(self, **kwargs): + """ + Args: + campaign (id) + website (id) + status (string) + start_date (date) + end_date (date) + appeal_id (string) + appeal_status (string) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'status': lambda x: Item.sanitize_string_value(x, 'status', blank=True), + 'start_date': lambda x: Item.sanitize_string_value(x, 'start_date', blank=True), + 'end_date': lambda x: Item.sanitize_string_value(x, 'end_date', blank=True), + 'appeal_id': lambda x: Item.sanitize_string_value(x, 'appeal_id', blank=True), + 'appeal_status': lambda x: Item.sanitize_string_value(x, 'appeal_status', blank=True), + } + } + + return self.transport.get() \ + .set_filtering(filtering) \ + .set_pagination(**kwargs) \ + .request(url=self.URL) + + def getOne(self, lost_order_id): + """ + Args: + lost_order_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'lost_order_id': Item.sanitize_id(lost_order_id) + } + + return self.transport.get().request(**request_data) + + +class LostOrdersManager(Item): + + SCOPE = 'manage_lost_orders' + + DELETE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s/decline') + CREATE_URL = Item.prepare_url('lost_orders/create') + UPDATE_URL = Item.prepare_url('lost_orders/%(lost_order_id)s/update') + + CREATE_FIELDS = { + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign'), + 'website': lambda x: Item.sanitize_integer_value(x, 'website'), + 'order_id': lambda x: Item.sanitize_string_value(x, 'order_id'), + 'order_date': lambda x: Item.sanitize_date(x, 'order_date'), + 'order_price': lambda x: Item.sanitize_float_value(x, 'order_price'), + 'comment': lambda x: Item.sanitize_string_value(x, 'comment'), + 'appeal_id': lambda x: 
Item.sanitize_string_value(x, 'appeal_id'), + } + + def delete(self, lost_order_id): + """ + Args: + lost_order_id (int) + + """ + request_data = { + 'url': self.DELETE_URL, + 'lost_order_id': Item.sanitize_id(lost_order_id), + } + + return self.transport.delete().request(**request_data) + + def create(self, attachments, **kwargs): + """ + Args: + attachments (list of str) + campaign (int) + website (int) + order_id (str) + order_date (date) + order_price (float) + appeal_id (str) + comment (str) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + files = [('attachment', open(item, 'rb')) for item in Item.sanitize_string_array(attachments, 'attachments')] + + return self.transport.post().set_data(data).set_files(files).request(url=self.CREATE_URL) + + def update(self, lost_order_id, appeal_status): + """ + Args: + lost_order_id (int) + appeal_status (str) + + """ + request_data = { + 'url': self.UPDATE_URL, + 'lost_order_id': Item.sanitize_id(lost_order_id), + } + + data = { + 'appeal_status': self.sanitize_string_value(appeal_status, 'appeal_status'), + } + + return self.transport.put().set_data(data).request(**request_data) diff --git a/admitad/items/me.py b/admitad/items/me.py new file mode 100644 index 0000000..1274067 --- /dev/null +++ b/admitad/items/me.py @@ -0,0 +1,81 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = ( + 'Me', + 'Balance', + 'PaymentsSettings', +) + + +class Me(Item): + """ + Get private information + + """ + + SCOPE = 'private_data private_data_email private_data_phone' + + URL = Item.prepare_url('me') + + def __call__(self): + return self.get() + + def get(self): + return self.transport.get().request(url=self.URL) + + +class Balance(Item): + """ + Get balance information + + """ + + SCOPE = 'private_data_balance' + + URL = Item.prepare_url('me/balance') + EXTENDED_URL = Item.prepare_url('me/balance/extended') + + def __call__(self, **kwargs): + return 
self.get(**kwargs) + + def get(self, **kwargs): + """ + Args: + extended (bool) + + """ + url = self.EXTENDED_URL if kwargs.get('extended', False) else self.URL + + return self.transport.get().request(url=url) + + +class PaymentsSettings(Item): + """ + Get payments settings by currency + + """ + + SCOPE = 'private_data_balance' + + URL = Item.prepare_url('me/payment/settings') + CURRENCY_URL = Item.prepare_url('me/payment/settings/%(currency)s') + + def __call__(self, **kwargs): + return self.get(**kwargs) + + def get(self, currency=None): + """ + Args: + currency (str) + + """ + request_data = { + 'currency': Item.sanitize_currency_value(currency, blank=True), + 'url': self.CURRENCY_URL if currency else self.URL + } + + return self.transport.get().request(**request_data) diff --git a/admitad/items/news.py b/admitad/items/news.py new file mode 100644 index 0000000..8f1dcbd --- /dev/null +++ b/admitad/items/news.py @@ -0,0 +1,62 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'News', +] + + +class News(Item): + """ + List of news + + """ + + SCOPE = 'public_data' + + URL = Item.prepare_url('news') + SINGLE_URL = Item.prepare_url('news/%(news_id)s') + + def get(self, **kwargs): + """ + Args: + limit (int) + offset (int) + language (str) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, news_id, **kwargs): + """ + Args: + news_id (int) + language (str) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'news_id': self.sanitize_id(news_id) + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'language': lambda x: Item.sanitize_string_value(x, 'language', 2, 2, True), + } + } + + return 
self.transport.get().set_filtering(filtering).request(**request_data) diff --git a/admitad/items/optcodes.py b/admitad/items/optcodes.py new file mode 100644 index 0000000..865aae4 --- /dev/null +++ b/admitad/items/optcodes.py @@ -0,0 +1,159 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'OptCodes', + 'OfferStatusOptCodesManager', + 'ActionOptCodesManager', +] + + +class BaseOptCodes(Item): + + DESC_MODE_SIMPLE = 0 + DESC_MODE_EXTENDED = 1 + + METHOD_GET = 0 + METHOD_POST = 1 + + ACTION_TYPE_ALL = 0 + ACTION_TYPE_SALE = 1 + ACTION_TYPE_LEAD = 2 + + ACTION_STATUS_NEW = 5 + ACTION_STATUS_APPROVED = 6 + ACTION_STATUS_DECLINED = 7 + ACTION_STATUS_PENDING = 8 + + EVENT_ACTION = 0 + EVENT_OFFER_STATUS = 1 + EVENT_REFERRAL = 2 + + +class OptCodes(BaseOptCodes): + + SCOPE = 'opt_codes' + + ORDERING = ('action_type', 'method', 'desc_mode') + + URL = Item.prepare_url('opt_codes') + SINGLE_URL = Item.prepare_url('opt_codes/%(optcode_id)s') + + def get(self, **kwargs): + """ + Args: + campaign (int) + website (int) + limit (int) + offset (int) + order_by (list of str) + + """ + ordering = { + 'order_by': kwargs.get('order_by', []), + 'available': self.ORDERING + } + + filtering = { + 'filter_by': kwargs, + 'available': { + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, optcode_id, **kwargs): + """ + Args: + optcode_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'optcode_id': Item.sanitize_id(optcode_id) + } + + return self.transport.get().request(**request_data) + + +class BaseOptCodesManager(BaseOptCodes): + + SCOPE = 'manage_opt_codes' + + DELETE_URL = Item.prepare_url('opt_codes/delete/%(optcode_id)s') + 
CREATE_URL = '' + UPDATE_URL = '' + + CREATE_FIELDS = {} + UPDATE_FIELDS = {} + + def delete(self, optcode_id): + request_data = { + 'url': self.DELETE_URL, + 'optcode_id': Item.sanitize_id(optcode_id), + } + return self.transport.post().request(**request_data) + + def create(self, **kwargs): + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + return self.transport.post().set_data(data).request(url=self.CREATE_URL) + + def update(self, optcode_id, **kwargs): + data = Item.sanitize_fields(self.UPDATE_FIELDS, **kwargs) + request_data = { + 'url': self.UPDATE_URL, + 'optcode_id': Item.sanitize_id(optcode_id), + } + + return self.transport.post().set_data(data).request(**request_data) + + +class OfferStatusOptCodesManager(BaseOptCodesManager): + + CREATE_URL = Item.prepare_url('opt_codes/offer/create') + UPDATE_URL = Item.prepare_url('opt_codes/offer/update/%(optcode_id)s') + + CREATE_FIELDS = { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode'), + 'url': lambda x: Item.sanitize_string_value(x, 'url'), + 'method': lambda x: Item.sanitize_integer_value(x, 'method'), + } + UPDATE_FIELDS = { + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode', blank=True), + 'url': lambda x: Item.sanitize_string_value(x, 'url', blank=True), + 'method': lambda x: Item.sanitize_integer_value(x, 'method', blank=True), + } + + +class ActionOptCodesManager(BaseOptCodesManager): + + CREATE_URL = Item.prepare_url('opt_codes/action/create') + UPDATE_URL = Item.prepare_url('opt_codes/action/update/%(optcode_id)s') + + CREATE_FIELDS = { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode'), + 'url': lambda x: 
Item.sanitize_string_value(x, 'url'), + 'method': lambda x: Item.sanitize_integer_value(x, 'method'), + 'action_type': lambda x: Item.sanitize_integer_value(x, 'action_type'), + 'status': lambda x: Item.sanitize_integer_value(x, 'status'), + } + UPDATE_FIELDS = { + 'desc_mode': lambda x: Item.sanitize_integer_value(x, 'desc_mode', blank=True), + 'url': lambda x: Item.sanitize_string_value(x, 'url', blank=True), + 'method': lambda x: Item.sanitize_integer_value(x, 'method', blank=True), + 'action_type': lambda x: Item.sanitize_integer_value(x, 'action_type', blank=True), + 'status': lambda x: Item.sanitize_integer_value(x, 'status', blank=True), + } diff --git a/admitad/items/payments.py b/admitad/items/payments.py new file mode 100644 index 0000000..196f9f5 --- /dev/null +++ b/admitad/items/payments.py @@ -0,0 +1,149 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Payments', + 'PaymentsStatement', + 'PaymentsManage', +] + + +class Payments(Item): + """ + List of webmaster payments + + """ + + SCOPE = 'payments' + + URL = Item.prepare_url('payments') + SINGLE_URL = Item.prepare_url('payments/%(payment_id)s') + + def get(self, **kwargs): + """ + Args: + has_statement (bool) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'has_statement': lambda x: Item.sanitize_bool_integer_value(x, 'has_statement', blank=True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'payment_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) + + +class PaymentsStatement(Item): + + SCOPE = 'payments' + + URL = Item.prepare_url('payments/%(payment_id)s/statement') + + def get(self, payment_id, **kwargs): + """ + Args: + detailed (bool) + limit 
(int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'detailed': lambda x: Item.sanitize_bool_integer_value(x, 'detailed', blank=True) + } + } + + request_data = { + 'url': self.URL, + 'payment_id': Item.sanitize_id(payment_id) + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(**request_data) + + +class PaymentsManage(Item): + """ + Manage payments + + """ + + SCOPE = 'manage_payments' + + CREATE_URL = Item.prepare_url('payments/request/%(code)s') + CONFIRM_URL = Item.prepare_url('payments/confirm/%(payment_id)s') + DELETE_URL = Item.prepare_url('payments/delete/%(payment_id)s') + + def create(self, _code, **kwargs): + """ + Create a payment request. + _code is a code of currency + + Args: + _code (str) + + """ + request_data = { + 'url': self.CREATE_URL, + 'code': Item.sanitize_currency_value(_code) + } + + return self.transport.post().request(**request_data) + + def confirm(self, _id, **kwargs): + """ + Confirm a payment request. + _id is a payment id. + + Args: + _id (int) + + """ + request_data = { + 'url': self.CONFIRM_URL, + 'payment_id': Item.sanitize_id(_id) + } + + return self.transport.post().request(**request_data) + + def delete(self, _id, **kwargs): + """ + Delete a payment request. + _id is a payment id. 
+ + Args: + _id (int) + + """ + request_data = { + 'url': self.DELETE_URL, + 'payment_id': Item.sanitize_id(_id) + } + + return self.transport.post().request(**request_data) diff --git a/admitad/items/referrals.py b/admitad/items/referrals.py new file mode 100644 index 0000000..123814c --- /dev/null +++ b/admitad/items/referrals.py @@ -0,0 +1,56 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Referrals', +] + + +class Referrals(Item): + """ + List of referrals + + """ + + SCOPE = 'referrals' + + URL = Item.prepare_url('referrals') + SINGLE_URL = Item.prepare_url('referrals/%(referral_id)s') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'referral_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**request_data) diff --git a/admitad/items/retag.py b/admitad/items/retag.py new file mode 100644 index 0000000..4096833 --- /dev/null +++ b/admitad/items/retag.py @@ -0,0 +1,150 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = [ + 'Retag', + 'RetagManager' +] + + +class Retag(Item): + SCOPE = 'webmaster_retag' + + URL = Item.prepare_url('retag') + SINGLE_URL = Item.prepare_url('retag/%(retag_id)s') + LEVELS_FOR_WEBSITE_URL = Item.prepare_url('retag/website/%(website_id)s/levels') + LEVELS_FOR_CAMPAIGN_URL = Item.prepare_url('retag/advcampaign/%(campaign_id)s/levels') + + def get(self, **kwargs): + """ + Args: + website 
(int) + active (bool) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'website': lambda x: Item.sanitize_integer_value(x, 'website', True), + 'active': lambda x: Item.sanitize_bool_integer_value(x, 'active', True) + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, retag_id): + """ + Args: + retag_id (int) + + """ + request_data = { + 'url': self.SINGLE_URL, + 'retag_id': Item.sanitize_id(retag_id) + } + + return self.transport.get().request(**request_data) + + def getLevelsForWebsite(self, website_id): + """ + Args: + website_id (int) + + """ + request_data = { + 'url': self.LEVELS_FOR_WEBSITE_URL, + 'website_id': Item.sanitize_id(website_id) + } + + return self.transport.get().request(**request_data) + + def getLevelsForCampaign(self, campaign_id): + """ + Args: + capaign_id (int) + + """ + request_data = { + 'url': self.LEVELS_FOR_CAMPAIGN_URL, + 'campaign_id': Item.sanitize_id(campaign_id) + } + + return self.transport.get().request(**request_data) + + +class RetagManager(Item): + + SCOPE = 'manage_webmaster_retag' + + CREATE_URL = Item.prepare_url('retag/create') + UPDATE_URL = Item.prepare_url('retag/update/%(retag_id)s') + DELETE_URL = Item.prepare_url('retag/delete/%(retag_id)s') + + CREATE_FIELDS = { + 'website': lambda x: Item.sanitize_integer_value(x, 'website'), + 'level': lambda x: Item.sanitize_integer_value(x, 'level'), + 'active': lambda x: Item.sanitize_bool_integer_value(x, 'active', blank=True), + 'script': lambda x: Item.sanitize_string_value(x, 'script'), + 'comment': lambda x: Item.sanitize_string_value(x, 'comment', blank=True), + } + + UPDATE_FIELDS = { + 'level': lambda x: Item.sanitize_integer_value(x, 'level', blank=True), + 'active': lambda x: Item.sanitize_bool_integer_value(x, 'active', blank=True), + 'script': lambda x: Item.sanitize_string_value(x, 'script', blank=True), + 'comment': lambda x: 
class ShortLinks(Item):
    """Shorten an affiliate link via the shortlink endpoint."""

    SCOPE = 'short_links'

    URL = Item.prepare_url('shortlink/modify/')

    # Validators applied to the POST payload.
    GET_FIELDS = {
        'link': lambda x: Item.sanitize_string_value(x, 'link'),
    }

    def post(self, link, **kwargs):
        """Submit a link to be shortened.

        Args:
            link (str)

        """
        payload = Item.sanitize_fields(self.GET_FIELDS, link=link)
        request = self.transport.post()
        request.set_data(payload)
        return request.request(url=self.URL)
class StatisticBase(Item):
    """Shared machinery for all statistics endpoints.

    Holds the value validators referenced from the FILTERING maps and a
    generic ``get`` that wires pagination, ordering and filtering into a
    single GET request. Subclasses override ORDERING/FILTERING as needed.
    """

    # Closed sets of values the API accepts for these filters.
    STATUSES = (1, 2, 3)
    SOURCES = ('g', 'y')
    ACTION_TYPES = ('lead', 'sale')

    # Field names accepted by the ``order_by`` parameter.
    ORDERING = (
        'action',
        'clicks',
        'cr',
        'ctr',
        'ecpc',
        'ecpm',
        'leads',
        'name',
        'payment_sum',
        'payment_sum_approved',
        'payment_sum_declined',
        'payment_sum_open',
        'sales',
        'views',
    )

    @staticmethod
    def check_sub_id(sub_id):
        """Return ``sub_id`` if it is within the allowed length, else None."""
        return sub_id if len(sub_id) <= MAX_SUB_ID_LENGTH else None

    @staticmethod
    def check_sources(source):
        """Return ``source`` if it is a known traffic source, else None.

        BUG FIX: the original ended with a trailing comma, so it returned a
        one-element tuple (e.g. ``('g',)`` or ``(None,)``). A ``(None,)``
        tuple is truthy, so invalid sources passed filtering and valid ones
        were sent wrapped in a tuple.
        """
        return source if source in StatisticBase.SOURCES else None

    @staticmethod
    def check_status(status):
        """Return ``status`` if it is a known action status, else None.

        BUG FIX: trailing comma removed (see ``check_sources``).
        """
        return status if status in StatisticBase.STATUSES else None

    @staticmethod
    def check_actions_type(action_type):
        """Return ``action_type`` if it is a known action type, else None.

        BUG FIX: trailing comma removed (see ``check_sources``).
        """
        return action_type if action_type in StatisticBase.ACTION_TYPES else None

    # Default filter validators shared by the simple statistics endpoints.
    FILTERING = {
        'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True),
        'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True),
        'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True),
        'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True),
        'total': lambda x: Item.sanitize_integer_value(x, 'total', blank=True),
        'subid': lambda x: StatisticBase.check_sub_id(x)
    }

    def get(self, url, **kwargs):
        """Base GET method: request ``url`` with pagination, ordering and
        filtering derived from ``kwargs``.

        Args:
            url (str): fully prepared endpoint URL.

        """
        kwargs['url'] = url

        ordering = {
            'order_by': kwargs.get('order_by', []),
            'available': self.ORDERING
        }

        filtering = {
            'filter_by': kwargs,
            'available': self.FILTERING,
        }

        return self.transport.get() \
            .set_pagination(**kwargs) \
            .set_ordering(ordering) \
            .set_filtering(filtering) \
            .request(**kwargs)
""" + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (list of str) + + """ + return super(StatisticWebsites, self).get(self.URL, **kwargs) + + +class StatisticCampaigns(StatisticBase): + """ + Statistics by campaigns + + """ + + SCOPE = 'statistics' + + URL = Item.prepare_url('statistics/campaigns') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (str) + + """ + return super(StatisticCampaigns, self).get(self.URL, **kwargs) + + +class StatisticDays(StatisticBase): + """ + Statistics by days + + """ + + SCOPE = 'statistics' + + URL = Item.prepare_url('statistics/dates') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (str) + + """ + return super(StatisticDays, self).get(self.URL, **kwargs) + + +class StatisticMonths(StatisticBase): + """ + Statistics by months + + """ + + SCOPE = 'statistics' + + URL = Item.prepare_url('statistics/months') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + subid (str) + total (int) + limit (int) + offset (int) + order_by (str) + + """ + return super(StatisticMonths, self).get(self.URL, **kwargs) + + +class StatisticActions(StatisticBase): + """ + Statistics by actions + + """ + + SCOPE = 'statistics' + + ORDERING = ( + 'action', + 'banner', + 'banner_id', + 'campaign', + 'cart', + 'click_date', + 'conv_time', + 'datetime', + 'payment', + 'status', + 'subid', + 'subid1', + 'subid2', + 'subid3', + 'subid4', + 'website' + ) + + FILTERING = { + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'closing_date_start': 
lambda x: Item.sanitize_date(x, 'closing_date_start', blank=True), + 'closing_date_end': lambda x: Item.sanitize_date(x, 'closing_date_end', blank=True), + 'status_updated_start': lambda x: Item.sanitize_long_date(x, 'status_updated_start', blank=True), + 'status_updated_end': lambda x: Item.sanitize_long_date(x, 'status_updated_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + 'subid': lambda x: StatisticBase.check_sub_id(x), + 'subid1': lambda x: StatisticBase.check_sub_id(x), + 'subid2': lambda x: StatisticBase.check_sub_id(x), + 'subid3': lambda x: StatisticBase.check_sub_id(x), + 'subid4': lambda x: StatisticBase.check_sub_id(x), + 'source': lambda x: StatisticBase.check_sources(x), + 'status': lambda x: StatisticBase.check_status(x), + 'keyword': lambda x: Item.sanitize_string_value(x, 'keyword', blank=True), + 'action': lambda x: Item.sanitize_string_value(x, 'action', blank=True), + 'action_type': lambda x: StatisticBase.check_actions_type(x), + 'action_id': lambda x: Item.sanitize_integer_value(x, 'action_id', blank=True), + } + + URL = Item.prepare_url('statistics/actions') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + closing_date_start (date) + closing_date_end (date) + status_updated_start (date) + status_updated_end (date) + website (int) + campaign (int) + subid (str) + subid1 (str) + subid2 (str) + subid3 (str) + subid4 (str) + source (str) + status (int) + keyword (str) + action (str) + action_type (str) + action_id (int) + limit (int) + offset (int) + order_by (list of int) + + """ + return super(StatisticActions, self).get(self.URL, **kwargs) + + +class StatisticSubIds(StatisticBase): + """ + Statistics by sub-ids + + """ + + SCOPE = 'statistics' + + SUB_ID_NUMBERS = range(0, 5) + + ORDERING = ( + 'actions', + 'clicks', + 'cr', + 'ecpc', + 'leads', + 'payment_sum', + 
'payment_sum_approved', + 'payment_sum_declined', + 'payment_sum_open', + 'sales' + ) + + FILTERING = { + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + } + + URL = Item.prepare_url('statistics/sub_ids%(subid_number)s') + + def sanitize_sub_id_number(self, number): + if number not in self.SUB_ID_NUMBERS: + raise ValueError("Invalid subid number. '%s': %s" % (number, self.SUB_ID_NUMBERS)) + + def prepare_filtering(self, sub_id_number): + params = copy(self.FILTERING) + subid_params = dict([ + ('subid%s' % (val or ''), StatisticBase.check_sub_id) + for val in self.SUB_ID_NUMBERS if val != sub_id_number]) + params.update(subid_params) + return params + + def prepare_ordering(self, sub_id_number): + sub_id_name = 'subid%s' % (sub_id_number or '') + return self.ORDERING + (sub_id_name,) + + def get(self, sub_id_number=0, **kwargs): + """ + Here sub_id_number is subid number. + It is allowed from 0 to 5 excluding. + It just will send request to sub_ids, sub_ids1, sub_ids2, + sub_ids3, sub_ids4 urls correspondingly. 
+ + res = client.StatisticSubIds.get() + res = client.StatisticSubIds.get(date_start='01.01.2013') + res = client.StatisticSubIds.get(subid="ADS778") + res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) + res = client.StatisticSubIds.get(limit=2) + + """ + self.sanitize_sub_id_number(sub_id_number) + kwargs['url'] = self.URL % { + 'subid_number': sub_id_number or '' + } + + ordering = { + 'order_by': kwargs.get('order_by', []), + 'available': self.prepare_ordering(sub_id_number) + } + + filtering = { + 'filter_by': kwargs, + 'available': self.prepare_filtering(sub_id_number) + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_ordering(ordering) \ + .set_filtering(filtering) \ + .request(**kwargs) + + +class StatisticSources(StatisticBase): + """ + Statistics by sources + + """ + + SCOPE = 'statistics' + + ORDERING = ( + 'actions', + 'clicks', + 'cr', + 'ecpc', + 'leads', + 'payment_sum', + 'payment_sum_approved', + 'payment_sum_declined', + 'payment_sum_open', + 'sales', + 'source', + ) + + FILTERING = { + 'date_start': lambda x: Item.sanitize_date(x, 'date_start', blank=True), + 'date_end': lambda x: Item.sanitize_date(x, 'date_end', blank=True), + 'website': lambda x: Item.sanitize_integer_value(x, 'website', blank=True), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign', blank=True), + } + + URL = Item.prepare_url('statistics/sources') + + def get(self, **kwargs): + """ + Args: + date_start (date) + date_end (date) + website (int) + campaign (int) + limit (int) + offset (int) + order_by (list of int) + + """ + return super(StatisticSources, self).get(self.URL, **kwargs) + + +class StatisticKeywords(StatisticBase): + """ + Statistics by keywords + + """ + + SCOPE = 'statistics' + + ORDERING = ( + 'actions', + 'clicks', + 'cr', + 'ecpc', + 'keyword', + 'leads', + 'payment_sum', + 'payment_sum_approved', + 'payment_sum_declined', + 'payment_sum_open', + 'sales', + 'source', + ) + + FILTERING = { + 'date_start': 
class Tickets(Item):
    """Read-only access to support tickets."""

    SCOPE = 'tickets'

    URL = Item.prepare_url('tickets')
    SINGLE_URL = Item.prepare_url('tickets/%(ticket_id)s')

    def get(self, **kwargs):
        """Return a paginated list of tickets.

        Args:
            date_start (date)
            date_end (date)
            status (int)
            limit (int)
            offset (int)

        """
        # Recognized filters; anything else in kwargs is ignored.
        allowed_filters = {
            'date_start': lambda value: Item.sanitize_date(value, 'date_start', True),
            'date_end': lambda value: Item.sanitize_date(value, 'date_end', True),
            'status': lambda value: Item.sanitize_integer_value(value, 'status', True),
        }
        filtering = {
            'filter_by': kwargs,
            'available': allowed_filters,
        }

        request = self.transport.get()
        request.set_pagination(**kwargs)
        request.set_filtering(filtering)
        return request.request(url=self.URL)

    def getOne(self, ticket_id):
        """Return a single ticket by its id.

        Args:
            ticket_id (int)

        """
        return self.transport.get().request(
            url=self.SINGLE_URL,
            ticket_id=Item.sanitize_id(ticket_id),
        )
Item.prepare_url('tickets/%(ticket_id)s/create') + + CREATE_FIELDS = { + 'subject': lambda x: Item.sanitize_string_value(x, 'subject'), + 'text': lambda x: Item.sanitize_string_value(x, 'text'), + 'campaign': lambda x: Item.sanitize_integer_value(x, 'campaign'), + 'category': lambda x: Item.sanitize_integer_value(x, 'category'), + 'priority': lambda x: Item.sanitize_integer_value(x, 'priority'), + } + + COMMENT_FIELDS = { + 'text': lambda x: Item.sanitize_string_value(x, 'text'), + } + + def create(self, **kwargs): + """ + Args: + subject (str) + text (str) + campaign (int) + category (int) + priority (int) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(url=self.CREATE_URL) + + def comment(self, ticket_id, **kwargs): + """ + Args: + ticket_id (int) + text (str) + + """ + request_data = { + 'url': self.COMMENT_URL, + 'ticket_id': Item.sanitize_id(ticket_id) + } + + data = Item.sanitize_fields(self.COMMENT_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(**request_data) diff --git a/admitad/items/websites.py b/admitad/items/websites.py new file mode 100644 index 0000000..0e53e81 --- /dev/null +++ b/admitad/items/websites.py @@ -0,0 +1,201 @@ +# coding: utf-8 +from __future__ import unicode_literals + +from admitad.items.base import Item + + +__all__ = ( + 'Websites', + 'WebsitesManage' +) + + +class Websites(Item): + """ + List of websites + + """ + + SCOPE = 'websites' + + URL = Item.prepare_url('websites') + SINGLE_URL = Item.prepare_url('websites/%(website_id)s') + + STATUS_NEW = 'new' + STATUS_PENDING = 'pending' + STATUS_ACTIVE = 'active' + STATUS_SUSPENDED = 'suspended' + STATUS_DECLINED = 'declined' + + CAMPAIGN_STATUS_PENDING = 'pending' + CAMPAIGN_STATUS_ACTIVE = 'active' + CAMPAIGN_STATUS_DECLINED = 'declined' + CAMPAIGN_STATUS_DISABLED = 'disabled' + + STATUS_LIST = [ + STATUS_NEW, STATUS_PENDING, STATUS_ACTIVE, + STATUS_SUSPENDED, STATUS_DECLINED + ] + 
CAMPAIGN_STATUS_LIST = [ + CAMPAIGN_STATUS_PENDING, CAMPAIGN_STATUS_ACTIVE, + CAMPAIGN_STATUS_DECLINED, CAMPAIGN_STATUS_DISABLED + ] + + def get(self, **kwargs): + """ + Args: + status (str) + campaign_status (str) + limit (int) + offset (int) + + """ + filtering = { + 'filter_by': kwargs, + 'available': { + 'status': lambda x: x if x in self.STATUS_LIST else None, + 'campaign_status': lambda x: x if x in self.CAMPAIGN_STATUS_LIST else None + } + } + + return self.transport.get() \ + .set_pagination(**kwargs) \ + .set_filtering(filtering) \ + .request(url=self.URL) + + def getOne(self, _id, **kwargs): + """ + Args: + _id (int) + + """ + requests_data = { + 'url': self.SINGLE_URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.get().request(**requests_data) + + +class WebsitesManage(Item): + """ + Manage websites + + """ + + SCOPE = 'manage_websites' + + CREATE_URL = Item.prepare_url('website/create') + UPDATE_URL = Item.prepare_url('website/update/%(website_id)s') + VERIFY_URL = Item.prepare_url('website/verify/%(website_id)s') + DELETE_URL = Item.prepare_url('website/delete/%(website_id)s') + + CREATE_FIELDS = { + 'name': lambda x: Item.sanitize_string_value( + x, 'name', max_length=200), + 'kind': lambda x: Item.sanitize_string_value( + x, 'kind', max_length=20), + 'language': lambda x: Item.sanitize_string_value( + x, 'language', max_length=2), + 'adservice': lambda x: Item.sanitize_integer_value( + x, 'adservice', blank=True), + 'site_url': lambda x: Item.sanitize_string_value( + x, 'site_url', max_length=255), + 'description': lambda x: Item.sanitize_string_value( + x, 'description', max_length=20000, min_length=100), + 'categories': lambda x: Item.sanitize_integer_array( + x, 'categories'), + 'regions': lambda x: Item.sanitize_string_array( + x, 'regions', max_length=2), + 'mailing_targeting': lambda x: Item.sanitize_bool_integer_value( + x, 'mailing_targeting', blank=True) + } + + UPDATE_FIELDS = { + 'name': lambda x: 
Item.sanitize_string_value( + x, 'name', max_length=200, blank=True), + 'kind': lambda x: Item.sanitize_string_value( + x, 'kind', max_length=20, blank=True), + 'language': lambda x: Item.sanitize_string_value( + x, 'language', max_length=2, blank=True), + 'adservice': lambda x: Item.sanitize_integer_value( + x, 'adservice', blank=True), + 'site_url': lambda x: Item.sanitize_string_value( + x, 'site_url', max_length=255, blank=True), + 'description': lambda x: Item.sanitize_string_value( + x, 'description', max_length=20000, min_length=100, blank=True), + 'categories': lambda x: Item.sanitize_integer_array( + x, 'categories', blank=True), + 'regions': lambda x: Item.sanitize_string_array( + x, 'regions', max_length=2, blank=True), + 'mailing_targeting': lambda x: Item.sanitize_bool_integer_value( + x, 'mailing_targeting', blank=True) + } + + def create(self, **kwargs): + """ + Args: + name (str) + kind (str) + language (str) + adservice (int) + site_url (str) + description (str) + categories (list of int) + regions (list of str) + mailing_targeting (bool) + + """ + data = Item.sanitize_fields(self.CREATE_FIELDS, **kwargs) + + return self.transport.post().set_data(data).request(url=self.CREATE_URL) + + def update(self, _id, **kwargs): + """ + Args: + _id (int) + name (str) + kind (str) + language (str) + adservice (int) + site_url (str) + description (str) + categories (list of int) + regions (list of str) + mailing_targeting (bool) + + """ + data = Item.sanitize_fields(self.UPDATE_FIELDS, **kwargs) + + request_data = { + 'url': self.UPDATE_URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.post().set_data(data).request(**request_data) + + def verify(self, _id): + """ + Args: + _id (int) + + """ + request_data = { + 'url': self.VERIFY_URL, + 'website_id': Item.sanitize_id(_id) + } + + return self.transport.post().request(**request_data) + + def delete(self, _id): + """ + Args: + _id (int) + + """ + request_data = { + 'url': self.DELETE_URL, + 
class BaseTestCase(TestCase):
    """Common base for API test cases: a token-less client plus a URL helper."""

    # Shared client; tests mock the HTTP layer, so an empty token is fine.
    client = get_oauth_client_token(access_token='')

    @staticmethod
    def prepare_url(url, params=None, **kwargs):
        """Interpolate ``kwargs`` into ``url`` and append ``params`` as a
        query string (``doseq`` so list values expand to repeated keys)."""
        resolved = url % kwargs
        if params:
            return '%s?%s' % (resolved, urlencode(params, doseq=True))
        return resolved
status=200 + ) + + result = self.client.Announcements.get(limit=1, offset=230) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) + + def test_get_announcements_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Announcements.SINGLE_URL, announcement_id=264), + match_querystring=True, + json={ + '_meta': { + 'count': 50, + 'limit': 1, + 'offset': 230 + }, + 'results': [{ + 'message': 'Message', + 'id': 264, + 'advcampaign': { + 'id': 8, + 'name': 'AdvCamp' + }, + 'event': 'request_accepted' + }] + }, + status=200 + ) + + result = self.client.Announcements.getOne(264) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_auxiliary.py b/admitad/tests/test_auxiliary.py new file mode 100644 index 0000000..d0bb1c8 --- /dev/null +++ b/admitad/tests/test_auxiliary.py @@ -0,0 +1,324 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.tests.base import BaseTestCase +from admitad.items.auxiliary import WebsiteTypes, WebsiteRegions, \ + SystemLanguages, SystemCurrencies, AdvertiserServices, CampaignCategories + + +class WebsiteTypesTestCase(BaseTestCase): + + def test_get_website_types_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(WebsiteTypes.URL, params={ + 'limit': 5, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 9, + 'limit': 5, + 'offset': 0 + }, + 'results': [ + 'website', + 'doorway', + 'contextual', + 'youtube', + 'social_app', + ] + }, + status=200 + ) + + result = self.client.WebsiteTypes.get(limit=5) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(5, len(result['results'])) + + +class 
WebsiteRegionsTestCase(BaseTestCase): + + def test_get_website_regions_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(WebsiteRegions.URL, params={ + 'limit': 2, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'limit': 2, + 'offset': 0, + 'count': 6 + }, + 'results': ['RU', 'EN'] + }, + status=200 + ) + + result = self.client.WebsiteRegions.get(limit=2) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) + + +class SystemLanguagesTestCase(BaseTestCase): + + def test_get_languages_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(SystemLanguages.URL, params={ + 'limit': 2, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 4, + 'limit': 2, + 'offset': 1 + }, + 'results': [{ + 'flag': 'http://cdn.admitad.com/images/flags/en.svg', + 'language': 'English', + 'language_code': 'en' + }, { + 'flag': 'http://cdn.admitad.com/images/flags/de.svg', + 'language': 'Deutsch', + 'language_code': 'de' + }] + }, + status=200 + ) + + result = self.client.SystemLanguages.get(limit=2) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) + + def test_get_language_request_with_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(SystemLanguages.SINGLE_URL, code='en'), + json={ + 'flag': 'http://cdn.admitad.com/images/flags/en.svg', + 'language': 'English', + 'language_code': 'en' + }, + status=200 + ) + + result = self.client.SystemLanguages.getOne(code='en') + + self.assertIn('flag', result) + self.assertIn('language', result) + self.assertIn('language_code', result) + + +class SystemCurrenciesTestCase(BaseTestCase): + + def test_get_currencies_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(SystemCurrencies.URL, params={ 
+ 'limit': 1, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 4, + 'limit': 1, + 'offset': 0 + }, + 'results': [{ + 'code': 'USD', + 'min_sum': '25.00', + 'name': 'American dollar', + 'sign': '$' + }] + }, + status=200 + ) + + result = self.client.SystemCurrencies.get(limit=1) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) + + +class AdvertiserServicesTestCase(BaseTestCase): + + def test_get_advertiser_services(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(AdvertiserServices.URL, params={ + 'limit': 2, + 'offset': 1 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 12, + 'limit': 2, + 'offset': 1 + }, + 'results': [{ + 'allowed_referrers': '', + 'id': 4, + 'logo': 'http://cdn.admitad.com/adservice/images/f7e67e924fa05952f03e0c8c40a11651.png', + 'name': 'Google AdWords', + 'url': 'http://adwords.google.com/' + }, { + 'allowed_referrers': 'facebook.com', + 'id': 3, + 'logo': 'http://cdn.admitad.com/adservice/images/e6fee9e2ca69a2113d1339ecbe361ea5.png', + 'name': 'Facebook', + 'url': 'http://facebook.com/' + }] + }, + status=200 + ) + + result = self.client.AdvertiserServices.get(limit=2, offset=1) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) + + def test_get_advertiser_services_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(AdvertiserServices.SINGLE_URL, id=3), + match_querystring=True, + json={ + 'allowed_referrers': 'facebook.com', + 'id': 3, + 'logo': 'http://cdn.admitad.com/adservice/images/e6fee9e2ca69a2113d1339ecbe361ea5.png', + 'name': 'Facebook', + 'url': 'http://facebook.com/' + }, + status=200 + ) + + result = self.client.AdvertiserServices.getOne(3) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('url', result) + + def 
test_get_advertiser_services_with_kind(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(AdvertiserServices.KIND_URL, kind='website', params={ + 'limit': 1, + 'offset': 0 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 10, + 'limit': 20, + 'offset': 0 + }, + 'results': [{ + 'allowed_referrers': 'facebook.com', + 'id': 3, + 'logo': 'http://cdn.admitad.com/adservice/images/e6fee9e2ca69a2113d1339ecbe361ea5.png', + 'name': 'Facebook', + 'url': 'http://facebook.com/' + }] + }, + status=200 + ) + + result = self.client.AdvertiserServices.getForKind('website', limit=1) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(1, len(result['results'])) + + +class CampaignsCategoriesTestCase(BaseTestCase): + + def test_get_campaigns_categories(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignCategories.URL, params={ + 'limit': 2, + 'offset': 1, + 'order_by': 'name' + }), + match_querystring=True, + json={ + '_meta': { + 'count': 12, + 'limit': 2, + 'offset': 1 + }, + 'results': [{ + 'id': 13, + 'language': 'en', + 'name': 'MobileCategory', + 'parent': None + }, { + 'id': 33, + 'language': 'en', + 'name': 'ZooCategory', + 'parent': None + }] + }, + status=200 + ) + + result = self.client.CampaignCategories.get(limit=2, offset=1, order_by='name') + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) + + def test_get_campaigns_categories_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignCategories.SINGLE_URL, id='13'), + match_querystring=True, + json={ + 'id': 13, + 'language': 'en', + 'name': 'MobileCategory', + 'parent': None + }, + status=200 + ) + + result = self.client.CampaignCategories.getOne(13) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('parent', result) + 
self.assertIn('language', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_banners.py b/admitad/tests/test_banners.py new file mode 100644 index 0000000..5ca1dcd --- /dev/null +++ b/admitad/tests/test_banners.py @@ -0,0 +1,56 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Banners, BannersForWebsite +from admitad.tests.base import BaseTestCase + + +class BannersTestCase(BaseTestCase): + + def test_get_banners_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Banners.URL, campaign_id=12, params={ + 'limit': 40, + 'offset': 10, + 'mobile_content': 'true' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Banners.get(12, mobile_content=True, limit=40, offset=10) + + self.assertIn('status', result) + + +class BannersForWebsiteTestCase(BaseTestCase): + + def test_get_banners_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BannersForWebsite.URL, campaign_id=12, website_id=10, params={ + 'limit': 40, + 'offset': 10, + 'mobile_content': 'true', + 'landing': 6, + 'uri_scheme': 'https' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.BannersForWebsite.get(12, 10, mobile_content=True, + landing=6, uri_scheme='https', + limit=40, offset=10) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_base.py b/admitad/tests/test_base.py new file mode 100644 index 0000000..d914472 --- /dev/null +++ b/admitad/tests/test_base.py @@ -0,0 +1,166 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +from datetime import datetime, date + +from admitad.items.base import Item +from admitad.tests.base import BaseTestCase +from admitad.constants import BASE_URL + + +class 
ItemTestCase(BaseTestCase): + + def test_sanitize_id(self): + self.assertEqual(Item.sanitize_id(2, ''), 2) + self.assertEqual(Item.sanitize_id(2**64, ''), 2**64) + self.assertEqual(Item.sanitize_id('64', ''), '64') + + with self.assertRaises(ValueError): + Item.sanitize_id(0, '') + Item.sanitize_id(None, '') + Item.sanitize_id(3.14, '') + Item.sanitize_id('foo', '') + + def test_sanitize_fields(self): + fields = { + 'field1': lambda x: Item.sanitize_non_blank_value(x, ''), + 'field2': lambda x: Item.sanitize_integer_value(x, ''), + 'field3': lambda x: Item.sanitize_string_value(x, '', blank=True), + } + + data = Item.sanitize_fields(fields, field1='foobarbaz', field2=42, field3='') + + self.assertDictEqual(data, { + 'field1': 'foobarbaz', + 'field2': 42, + 'field3': '' + }) + + data = Item.sanitize_fields(fields, field1='foobarbaz', field2=42, field3='', field4='another') + + self.assertDictEqual(data, { + 'field1': 'foobarbaz', + 'field2': 42, + 'field3': '' + }) + + def test_sanitize_non_blank_value(self): + self.assertEqual(Item.sanitize_non_blank_value(0, ''), 0) + self.assertEqual(Item.sanitize_non_blank_value('a', ''), 'a') + self.assertListEqual(Item.sanitize_non_blank_value([1], ''), [1]) + self.assertDictEqual(Item.sanitize_non_blank_value({'a': 1}, ''), {'a': 1}) + self.assertTupleEqual(Item.sanitize_non_blank_value((1, 2), ''), (1, 2)) + + with self.assertRaises(ValueError): + Item.sanitize_non_blank_value('', '') + Item.sanitize_non_blank_value([], '') + Item.sanitize_non_blank_value({}, '') + Item.sanitize_non_blank_value((), '') + Item.sanitize_non_blank_value(None, '') + + def test_sanitize_string_value(self): + self.assertEqual(Item.sanitize_string_value('foo', '', 10, None, False), 'foo') + self.assertEqual(Item.sanitize_string_value('foo', '', None, 2, False), 'foo') + self.assertEqual(Item.sanitize_string_value('foobarbaz', '', 10, 5, False), 'foobarbaz') + self.assertEqual(Item.sanitize_string_value('', '', None, None, True), '') + + with 
self.assertRaises(ValueError): + Item.sanitize_string_value('', '', None, None, False) + Item.sanitize_string_value('foo', '', 2, None, False) + Item.sanitize_string_value('foo', '', None, 5, False) + Item.sanitize_string_value('foobarbaz', '', 5, 6, False) + + def test_sanitize_integer_value(self): + self.assertEqual(Item.sanitize_integer_value(2, '', False), 2) + self.assertEqual(Item.sanitize_integer_value(0, '', False), 0) + self.assertEqual(Item.sanitize_integer_value(None, '', True), None) + self.assertEqual(Item.sanitize_integer_value(2**64, '', False), 2**64) + self.assertEqual(Item.sanitize_integer_value('64', '', False), '64') + + with self.assertRaises(ValueError): + Item.sanitize_integer_value(None, '', False) + Item.sanitize_integer_value(3.14, '', False) + Item.sanitize_integer_value('foo', '', False) + + def test_sanitize_float_value(self): + self.assertEqual(Item.sanitize_float_value(1, '', False), 1) + self.assertEqual(Item.sanitize_float_value(0, '', False), 0) + self.assertEqual(Item.sanitize_float_value('12', '', False), '12') + self.assertEqual(Item.sanitize_float_value('3.14', '', False), '3.14') + self.assertEqual(Item.sanitize_float_value(3.14, '', False), 3.14) + self.assertEqual(Item.sanitize_float_value(None, '', True), None) + + with self.assertRaises(ValueError): + Item.sanitize_float_value(None, '', False) + Item.sanitize_float_value('foo', '', False) + + def test_sanitize_integer_array(self): + self.assertEqual(Item.sanitize_integer_array(None, '', True), None) + self.assertEqual(Item.sanitize_integer_array([], '', True), []) + self.assertListEqual(Item.sanitize_integer_array([0, 1, '12'], '', False), [0, 1, '12']) + self.assertListEqual(Item.sanitize_integer_array([5, None, '1', None], '', True), [5, None, '1', None]) + self.assertListEqual(Item.sanitize_integer_array(5, ''), [5]) + + with self.assertRaises(ValueError): + Item.sanitize_integer_array(None, '', False) + Item.sanitize_integer_array([], '', False) + 
Item.sanitize_integer_array([1, 2, 3, None, 5], '', False) + + def test_sanitize_string_array(self): + self.assertEqual(Item.sanitize_string_array(None, '', None, None, True), None) + self.assertListEqual(Item.sanitize_string_array([], '', None, None, True), []) + self.assertListEqual(Item.sanitize_string_array('foo', ''), ['foo']) + self.assertListEqual(Item.sanitize_string_array([''], '', None, None, True), ['']) + self.assertListEqual(Item.sanitize_string_array(['foo', 'bar'], '', 10, 2, False), ['foo', 'bar']) + self.assertListEqual(Item.sanitize_string_array(['foo', 'bar'], '', None, None, False), ['foo', 'bar']) + + with self.assertRaises(ValueError): + Item.sanitize_string_array(None, '', False) + Item.sanitize_string_array([], '', False) + Item.sanitize_string_array([''], '', False) + Item.sanitize_string_array(['foobarbaz'], '', 5, 3, False) + Item.sanitize_string_array(['foobarbaz'], '', 5, None, False) + Item.sanitize_string_array(['foo'], '', None, 5, False) + + def test_sanitize_currency(self): + self.assertEqual(Item.sanitize_currency_value(None, True), None) + self.assertEqual(Item.sanitize_currency_value('', True), '') + self.assertEqual(Item.sanitize_currency_value('usd', False), 'USD') + self.assertEqual(Item.sanitize_currency_value('EUR', False), 'EUR') + + with self.assertRaises(ValueError): + Item.sanitize_currency_value(None, False) + Item.sanitize_currency_value('', False) + Item.sanitize_currency_value('foobarbaz', True) + Item.sanitize_currency_value('12', True) + + def test_sanitize_date(self): + self.assertEqual(Item.sanitize_date(None, '', True), None) + self.assertEqual(Item.sanitize_date(datetime(2020, 1, 1), '', False), '01.01.2020') + self.assertEqual(Item.sanitize_date(date(2020, 1, 1), '', False), '01.01.2020') + self.assertEqual(Item.sanitize_date('01.01.2020', '', False), '01.01.2020') + + with self.assertRaises(ValueError): + Item.sanitize_date(None, '', False) + Item.sanitize_date('01/01/2020', '', True) + + def 
test_sanitize_long_date(self): + self.assertEqual(Item.sanitize_long_date(None, '', True), None) + self.assertEqual(Item.sanitize_long_date(datetime(2020, 1, 1, 11, 20, 36), '', False), '01.01.2020 11:20:36') + self.assertEqual(Item.sanitize_long_date('01.01.2020 11:20:36', '', False), '01.01.2020 11:20:36') + + with self.assertRaises(ValueError): + Item.sanitize_long_date(None, '', False) + Item.sanitize_long_date('01/01/2020', '', True) + Item.sanitize_long_date('01.01.2020 11/22/22', '', False) + + def test_prepare_url(self): + self.assertEqual(Item.prepare_url('somepath'), '%ssomepath/' % BASE_URL) + self.assertEqual(Item.prepare_url('somepath/'), '%ssomepath/' % BASE_URL) + self.assertEqual(Item.prepare_url('/somepath'), '%ssomepath/' % BASE_URL) + self.assertEqual(Item.prepare_url('/somepath/'), '%ssomepath/' % BASE_URL) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_broken_links.py b/admitad/tests/test_broken_links.py new file mode 100644 index 0000000..07b1dc6 --- /dev/null +++ b/admitad/tests/test_broken_links.py @@ -0,0 +1,73 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import BrokenLinks, ManageBrokenLinks +from admitad.tests.base import BaseTestCase + + +class BrokenLinksTestCase(BaseTestCase): + + def test_get_broken_links_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BrokenLinks.URL, params={ + 'limit': 50, + 'offset': 2, + 'website': [1, 2, 3], + 'campaign': [1, 2], + 'search': 'some', + 'reason': 0, + 'date_start': '01.01.2010', + 'date_end': '01.01.2020' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.BrokenLinks.get( + website=[1, 2, 3], campaign=[1, 2], + search='some', reason=0, + date_start='01.01.2010', date_end='01.01.2020', + limit=50, offset=2 + ) + + self.assertIn('status', result) + + def 
test_get_single_broken_link_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BrokenLinks.SINGLE_URL, broken_link_id=10), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.BrokenLinks.getOne(10) + + self.assertIn('status', result) + + +class ManageBrokenLinksTestCase(BaseTestCase): + + def test_resolve_broken_link_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(ManageBrokenLinks.RESOLVE_URL), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.ManageBrokenLinks.resolve([10, 20]) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_campaigns.py b/admitad/tests/test_campaigns.py new file mode 100644 index 0000000..8f8e3d8 --- /dev/null +++ b/admitad/tests/test_campaigns.py @@ -0,0 +1,110 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Campaigns, CampaignsForWebsite, \ + CampaignsManage +from admitad.tests.base import BaseTestCase + + +class CampaignsTestCase(BaseTestCase): + + def test_get_campaigns_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Campaigns.URL, params={ + 'website': 10, + 'has_tool': ['deeplink', 'retag'], + 'limit': 10, + 'offset': 0, + 'language': 'en' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Campaigns.get(website=10, has_tool=['deeplink', 'retag'], + limit=10, offset=0, language='en') + + self.assertIn('status', result) + + def test_get_campaigns_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Campaigns.SINGLE_URL, campaign_id=10), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Campaigns.getOne(10) + + self.assertIn('status', result) + + +class 
CampaignsForWebsiteTestCase(BaseTestCase): + + def test_get_campaigns_for_websites_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignsForWebsite.URL, website_id=16, params={ + 'limit': 26, + 'offset': 10 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsForWebsite.get(16, limit=26, offset=10) + + self.assertIn('status', result) + + def test_get_campaigns_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CampaignsForWebsite.SINGLE_URL, website_id=10, campaign_id=88), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsForWebsite.getOne(10, 88) + + self.assertIn('status', result) + + +class CampaignsConnectWebsiteTestCase(BaseTestCase): + + def test_campaign_connect_websites_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(CampaignsManage.CONNECT_URL, campaign_id=10, website_id=22), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsManage.connect(10, 22) + + self.assertIn('status', result) + + def test_campaign_disconnect_websites_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(CampaignsManage.DISCONNECT_URL, campaign_id=10, website_id=22), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CampaignsManage.disconnect(10, 22) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_coupons.py b/admitad/tests/test_coupons.py new file mode 100644 index 0000000..9e13e88 --- /dev/null +++ b/admitad/tests/test_coupons.py @@ -0,0 +1,123 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Coupons, CouponsForWebsite, CouponsCategories 
+from admitad.tests.base import BaseTestCase + + +class CouponsTestCase(BaseTestCase): + + def test_get_coupons_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Coupons.URL, params={ + 'campaign': [1, 5, 6], + 'campaign_category': [11, 12], + 'category': [22, 23], + 'type': 'some', + 'limit': 10, + 'offset': 0, + 'order_by': ['name', '-rating'] + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Coupons.get( + campaign=[1, 5, 6], campaign_category=[11, 12], + category=[22, 23], type='some', limit=10, offset=0, + order_by=['name', '-rating']) + + self.assertIn('status', result) + + def test_get_coupons_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Coupons.SINGLE_URL, coupon_id=42), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Coupons.getOne(42) + + self.assertIn('status', result) + + +class CouponsForWebsiteTestCase(BaseTestCase): + + def test_get_coupons_for_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsForWebsite.URL, website_id=1, params={ + 'campaign': [1, 5, 6], + 'campaign_category': [11, 12], + 'category': [22, 23], + 'type': 'some', + 'limit': 10, + 'offset': 0, + 'order_by': ['name', '-rating'] + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsForWebsite.get( + 1, campaign=[1, 5, 6], campaign_category=[11, 12], + category=[22, 23], type='some', limit=10, offset=0, + order_by=['name', '-rating']) + + self.assertIn('status', result) + + def test_get_coupons_for_website_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsForWebsite.SINGLE_URL, website_id=10, campaign_id=20), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = 
self.client.CouponsForWebsite.getOne(10, 20) + + self.assertIn('status', result) + + +class CouponsCategoriesTestCase(BaseTestCase): + + def test_get_categories_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsCategories.URL, params={ + 'limit': 10, + 'offset': 0 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsCategories.get(limit=10, offset=0) + + self.assertIn('status', result) + + def test_get_categorty_with_id_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(CouponsCategories.SINGLE_URL, coupon_category_id=200), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.CouponsCategories.getOne(200) + + self.assertIn('status', result) + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_deeplinks.py b/admitad/tests/test_deeplinks.py new file mode 100644 index 0000000..b086c8e --- /dev/null +++ b/admitad/tests/test_deeplinks.py @@ -0,0 +1,32 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.tests.base import BaseTestCase +from admitad.items import DeeplinksManage + + +class DeeplinksManageTestCase(BaseTestCase): + + def test_deeplinks_create_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(DeeplinksManage.CREATE_URL, website_id=9, campaign_id=10, params={ + 'subid': '0987654321234567890', + 'ulp': 'https://google.com/' + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.DeeplinksManage.create(9, 10, subid='0987654321234567890', ulp='https://google.com/') + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_landings.py b/admitad/tests/test_landings.py new file mode 100644 index 0000000..a79b14f --- /dev/null +++ 
b/admitad/tests/test_landings.py @@ -0,0 +1,52 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Landings, LandingsForWebsite +from admitad.tests.base import BaseTestCase + + +class LandingsTestCase(BaseTestCase): + + def test_landings_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Landings.URL, campaign_id=8, params={ + 'limit': 2, + 'offset': 0 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Landings.get(8, limit=2, offset=0) + + self.assertIn('status', result) + + +class LandingsForWebsiteTestCase(BaseTestCase): + + def test_landings_for_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LandingsForWebsite.URL, campaign_id=8, website_id=11, params={ + 'limit': 1, + 'offset': 0 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.LandingsForWebsite.get(8, 11, limit=1) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_links.py b/admitad/tests/test_links.py new file mode 100644 index 0000000..a592a91 --- /dev/null +++ b/admitad/tests/test_links.py @@ -0,0 +1,35 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import LinksValidator +from admitad.tests.base import BaseTestCase + + +class LinksValidationTestCase(BaseTestCase): + + def test_link_validation_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LinksValidator.URL, params={ + 'link': 'https://google.com/' + }), + match_querystring=True, + json={ + 'message': 'Link tested.', + 'success': 'Accepted' + }, + status=200 + ) + + result = self.client.LinksValidator.get('https://google.com/') + + self.assertIn('message', result) + self.assertIn('success', 
result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_lost_orders.py b/admitad/tests/test_lost_orders.py new file mode 100644 index 0000000..d7e3170 --- /dev/null +++ b/admitad/tests/test_lost_orders.py @@ -0,0 +1,103 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import LostOrders, LostOrdersManager +from admitad.tests.base import BaseTestCase + + +class LostOrdersTestCase(BaseTestCase): + + def test_get_lost_orders_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LostOrders.URL, params={ + 'limit': 20, + 'offset': 1, + 'appeal_status': 'resolved', + }), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrders.get( + limit=20, + offset=1, + appeal_status='resolved' + ) + + self.assertIn('status', result) + + def test_get_lost_order_by_id_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(LostOrders.SINGLE_URL, lost_order_id=12), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrders.getOne(12) + + self.assertIn('status', result) + + +class LostOrdersManagerTestCase(BaseTestCase): + + def test_create_lost_order(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(LostOrdersManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrdersManager.create( + attachments=['./admitad/tests/data/image.png'], + website=10, + campaign=20, + order_id='asd3f3', + order_date='01.01.2010', + order_price=1200, + comment='foo bar baz', + appeal_id='foo' + ) + + self.assertIn('status', result) + + def test_update_lost_order(self): + with responses.RequestsMock() as resp: + resp.add( + resp.PUT, + self.prepare_url(LostOrdersManager.UPDATE_URL, lost_order_id=10), + match_querystring=True, + 
json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrdersManager.update( + lost_order_id=10, + appeal_status='resolved' + ) + + self.assertIn('status', result) + + def test_delete_lost_order(self): + with responses.RequestsMock() as resp: + resp.add( + resp.DELETE, + self.prepare_url(LostOrdersManager.DELETE_URL, lost_order_id=2), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.LostOrdersManager.delete(2) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_me.py b/admitad/tests/test_me.py new file mode 100644 index 0000000..a263ae1 --- /dev/null +++ b/admitad/tests/test_me.py @@ -0,0 +1,148 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Me, Balance, PaymentsSettings +from admitad.tests.base import BaseTestCase + + +class MeTestCase(BaseTestCase): + + def test_me_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Me.URL), + json={ + 'id': 1, + 'username': 'username', + 'first_name': 'first_name', + 'last_name': 'last_name', + 'language': 'ru' + }, + status=200 + ) + + result = self.client.Me.get() + + self.assertEqual(result['id'], 1) + self.assertEqual(result['username'], 'username') + self.assertEqual(result['first_name'], 'first_name') + self.assertEqual(result['last_name'], 'last_name') + self.assertEqual(result['language'], 'ru') + + +class BalanceTestCase(BaseTestCase): + + def test_balance_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Balance.URL), + json=[{ + 'currency': 'USD', + 'balance': '20000.00' + }, { + 'currency': 'EUR', + 'balance': '0.00' + }], + status=200 + ) + + result = self.client.Balance.get() + + self.assertEqual(len(result), 2) + for item in result: + self.assertIn('balance', item) + self.assertIn('currency', item) + + def 
test_balance_extended_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Balance.EXTENDED_URL), + json=[{ + 'currency': 'USD', + 'balance': '20000.00', + 'processing': '20.00', + 'today': '0.00', + 'stalled': '100.00' + }, { + 'currency': 'EUR', + 'balance': '0.00', + 'processing': '2100.00', + 'today': '0.00', + 'stalled': '0.00' + }], + status=200 + ) + + result = self.client.Balance.get(extended=True) + + self.assertEqual(len(result), 2) + for item in result: + self.assertIn('balance', item) + self.assertIn('currency', item) + self.assertIn('processing', item) + self.assertIn('today', item) + self.assertIn('stalled', item) + + +class PaymentsSettingsTestCase(BaseTestCase): + + def test_payments_settings_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(PaymentsSettings.URL), + json=[{ + 'id': 11, + 'name': 'some_name', + 'currency': ['USD'], + 'withdrawal_type': 'webmoney' + }, { + 'id': 18, + 'name': 'some_another', + 'currency': ['EUR'], + 'withdrawal_type': 'paypal' + }], + status=200 + ) + + result = self.client.PaymentsSettings.get() + + self.assertEqual(len(result), 2) + for item in result: + self.assertIn('id', item) + self.assertIn('name', item) + self.assertIn('currency', item) + self.assertIn('withdrawal_type', item) + + def test_payments_settings_usd_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(PaymentsSettings.CURRENCY_URL, currency='USD'), + json=[{ + 'id': 11, + 'name': 'some_name', + 'currency': ['USD'], + 'withdrawal_type': 'webmoney' + }], + status=200 + ) + + result = self.client.PaymentsSettings.get(currency='USD') + + self.assertEqual(len(result), 1) + for item in result: + self.assertIn('id', item) + self.assertIn('name', item) + self.assertIn('currency', item) + self.assertIn('withdrawal_type', item) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_news.py 
b/admitad/tests/test_news.py new file mode 100644 index 0000000..d2a537c --- /dev/null +++ b/admitad/tests/test_news.py @@ -0,0 +1,91 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import News +from admitad.tests.base import BaseTestCase + + +class AnnouncementsTestCase(BaseTestCase): + + def test_get_announcements_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(News.URL, params={ + 'limit': 2, + 'offset': 2 + }), + match_querystring=True, + json={ + '_meta': { + 'count': 50, + 'limit': 2, + 'offset': 2 + }, + 'results': [{ + 'id': 12, + 'url': '', + 'language': 'en', + 'content': '

full text

', + 'short_content': 'short text', + 'advcampaign': { + 'id': 18, + 'name': 'AdvCamp' + }, + 'datetime': '2009-12-02T23:08:45' + }, { + 'id': 16, + 'url': '', + 'language': 'en', + 'content': '

full text 2

', + 'short_content': 'short text 2', + 'advcampaign': { + 'id': 18, + 'name': 'AdvCamp' + }, + 'datetime': '2009-12-02T23:09:00' + }] + }, + status=200 + ) + + result = self.client.News.get(limit=2, offset=2) + + self.assertIn('_meta', result) + self.assertIn('results', result) + self.assertEqual(2, len(result['results'])) + + def test_get_announcements_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(News.SINGLE_URL, news_id=12), + match_querystring=True, + json={ + 'id': 12, + 'url': '', + 'language': 'en', + 'content': '

full text

', + 'short_content': 'short text', + 'advcampaign': { + 'id': 18, + 'name': 'AdvCamp' + }, + 'datetime': '2009-12-02T23:08:45' + }, + status=200 + ) + + result = self.client.News.getOne(12) + + self.assertIn('id', result) + self.assertIn('url', result) + self.assertIn('content', result) + self.assertIn('datetime', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_optcodes.py b/admitad/tests/test_optcodes.py new file mode 100644 index 0000000..77e17aa --- /dev/null +++ b/admitad/tests/test_optcodes.py @@ -0,0 +1,137 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import OptCodes, OfferStatusOptCodesManager, ActionOptCodesManager +from admitad.tests.base import BaseTestCase + + +class OptCodeTestCase(BaseTestCase): + + def test_get_opt_codes_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(OptCodes.URL, params={ + 'campaign': 10, + 'website': 20, + 'limit': 1, + 'offset': 0, + 'order_by': ['method'] + }), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OptCodes.get( + campaign=10, + website=20, + limit=1, + offset=0, + order_by=['method'] + ) + + self.assertIn('status', result) + + def test_get_opt_code_by_id_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(OptCodes.SINGLE_URL, optcode_id=12), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OptCodes.getOne(12) + + self.assertIn('status', result) + + +class OffserStatusOptCodesManagerTestCase(BaseTestCase): + + def test_create_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(OfferStatusOptCodesManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OfferStatusOptCodesManager.create( + website=10, + campaign=20, + 
desc_mode=1, + url='https://google.com', + method=1 + ) + + self.assertIn('status', result) + + def test_update_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(OfferStatusOptCodesManager.UPDATE_URL, optcode_id=2), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.OfferStatusOptCodesManager.update( + 2, + desc_mode=2, + url='https://google.com/', + method=2 + ) + + self.assertIn('status', result) + + +class ActionOptCodesManagerTestCase(BaseTestCase): + + def test_create_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(ActionOptCodesManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.ActionOptCodesManager.create( + website=10, + campaign=20, + desc_mode=1, + url='https://google.com', + method=1, + action_type=1, + status=1 + ) + + self.assertIn('status', result) + + def test_update_opt_code(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(ActionOptCodesManager.UPDATE_URL, optcode_id=77), + match_querystring=True, + json={'status': 'ok'}, + status=200, + ) + result = self.client.ActionOptCodesManager.update( + 77, + desc_mode=2, + url='https://google.com/', + method=2, + action_type=2, + status=1 + ) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_payments.py b/admitad/tests/test_payments.py new file mode 100644 index 0000000..9dce90c --- /dev/null +++ b/admitad/tests/test_payments.py @@ -0,0 +1,107 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Payments, PaymentsStatement, PaymentsManage +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase + + +class PaymentsTestCase(BaseTestCase): + + def 
test_get_payments_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Payments.URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Payments.get() + + self.assertIn('status', result) + + def test_get_payments_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Payments.SINGLE_URL, payment_id=167), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.Payments.getOne(167) + + self.assertIn('status', result) + + +class PaymentsStatementTestCase(BaseTestCase): + + def test_get_payments_statement_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(PaymentsStatement.URL, payment_id=12, params={ + 'detailed': 1, + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsStatement.get(12, detailed=True) + + self.assertIn('status', result) + + +class PaymentsManageTestCase(BaseTestCase): + + def test_create_payments_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(PaymentsManage.CREATE_URL, code='USD'), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsManage.create('USD') + + self.assertIn('status', result) + + def test_confirm_payments_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(PaymentsManage.CONFIRM_URL, payment_id=98), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsManage.confirm(98) + + self.assertIn('status', result) + + def test_delete_payments_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + 
self.prepare_url(PaymentsManage.DELETE_URL, payment_id=98), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.PaymentsManage.delete(98) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_referrals.py b/admitad/tests/test_referrals.py new file mode 100644 index 0000000..d61425b --- /dev/null +++ b/admitad/tests/test_referrals.py @@ -0,0 +1,110 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +from datetime import datetime + +import responses + +from admitad.items import Referrals +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase + + +class ReferralsTestCase(BaseTestCase): + + def test_get_referrals_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Referrals.URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={ + '_meta': { + 'count': 2, + 'limit': 20, + 'offset': 0 + }, + 'results': [{ + 'id': 8, + 'payment': None, + 'username': 'username1' + }, { + 'id': 10, + 'payment': None, + 'username': 'username2' + }] + }, + status=200 + ) + + result = self.client.Referrals.get() + + self.assertIn('results', result) + self.assertIn('_meta', result) + self.assertIsInstance(result['results'], list) + self.assertIsInstance(result['_meta'], dict) + self.assertEqual(result['_meta']['limit'], 20) + + def test_get_referrals_with_filters_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Referrals.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.01.2020', + 'limit': 40, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={ + '_meta': { + 'count': 2, + 'limit': 40, + 'offset': 0 + }, + 'results': [{ + 'id': 8, + 'payment': None, + 'username': 'username1' + }, { + 
'id': 10, + 'payment': None, + 'username': 'username2' + }] + }, + status=200 + ) + + result = self.client.Referrals.get(date_start=datetime(2010, 1, 1), date_end=datetime(2020, 1, 1), limit=40) + + self.assertIn('results', result) + self.assertIn('_meta', result) + self.assertIsInstance(result['results'], list) + self.assertIsInstance(result['_meta'], dict) + self.assertEqual(result['_meta']['limit'], 40) + + def test_get_referrals_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Referrals.SINGLE_URL, referral_id=8), + match_querystring=True, + json={ + 'id': 8, + 'payment': None, + 'username': 'username1' + }, + status=200 + ) + + result = self.client.Referrals.getOne(8) + + self.assertEqual(result['id'], 8) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_retag.py b/admitad/tests/test_retag.py new file mode 100644 index 0000000..29027a0 --- /dev/null +++ b/admitad/tests/test_retag.py @@ -0,0 +1,130 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Retag, RetagManager +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase + + +class RetagTestCase(BaseTestCase): + + def test_retag_get_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET, + 'website': 10, + 'active': 1 + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.get(website=10, active=True) + + self.assertIn('status', result) + + def test_retag_get_single_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.SINGLE_URL, retag_id=11), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = 
self.client.Retag.getOne(11) + + self.assertIn('status', result) + + def test_retag_get_levels_for_campaign_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.LEVELS_FOR_CAMPAIGN_URL, campaign_id=20), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.getLevelsForCampaign(20) + + self.assertIn('status', result) + + def test_retag_get_levels_for_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Retag.LEVELS_FOR_WEBSITE_URL, website_id=78), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Retag.getLevelsForWebsite(78) + + self.assertIn('status', result) + + +class ManageRetagTestCase(BaseTestCase): + + def test_retag_create(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(RetagManager.CREATE_URL), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.RetagManager.create( + website=10, + level=2, + active=False, + script='print', + comment='some comment' + ) + + self.assertIn('status', result) + + def test_retag_update(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(RetagManager.UPDATE_URL, retag_id=50), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.RetagManager.update( + 50, + level=4, + active=True + ) + + self.assertIn('status', result) + + def test_retag_delete(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(RetagManager.DELETE_URL, retag_id=50), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.RetagManager.delete(50) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_statistics.py b/admitad/tests/test_statistics.py new file mode 100644 index 0000000..5cd8726 --- /dev/null +++ 
b/admitad/tests/test_statistics.py @@ -0,0 +1,297 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import StatisticWebsites, StatisticCampaigns,\ + StatisticDays, StatisticMonths, StatisticActions, StatisticSubIds,\ + StatisticSources, StatisticKeywords +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase + + +class StatisticWebsitesTestCase(BaseTestCase): + + def test_get_statistic_websites_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticWebsites.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticWebsites.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticCampaignTestCase(BaseTestCase): + + def test_get_statistic_campaign_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticCampaigns.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticCampaigns.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class 
StatisticDaysTestCase(BaseTestCase): + + def test_get_statistic_days_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticDays.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticDays.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticMonthsTestCase(BaseTestCase): + + def test_get_statistic_months_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticMonths.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid': '1234567890987654321', + 'total': 200, + 'order_by': ['cr'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticMonths.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid='1234567890987654321', + total=200, + order_by=['cr'] + ) + + self.assertIn('status', result) + + +class StatisticActionsTestCase(BaseTestCase): + + def test_get_statistic_actions_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticActions.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'closing_date_start': '01.01.2010', + 'closing_date_end': '01.02.2010', + 'status_updated_start': '01.01.2010 10:10:10', + 'status_updated_end': '01.02.2010 10:10:10', + 'website': 10, + 'campaign': 20, + 'subid': 
'1234567890987654321', + 'subid1': '1234567890987654321', + 'subid4': '1234567890987654321', + 'status': 1, + 'keyword': 'foo', + 'action': 'lead', + 'action_type': 'lead', + 'action_id': 27, + 'order_by': ['status'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticActions.get( + date_start='01.01.2010', + date_end='01.02.2010', + closing_date_start='01.01.2010', + closing_date_end='01.02.2010', + status_updated_start='01.01.2010 10:10:10', + status_updated_end='01.02.2010 10:10:10', + website=10, + campaign=20, + subid='1234567890987654321', + subid1='1234567890987654321', + subid4='1234567890987654321', + status=1, + keyword='foo', + action='lead', + action_type='lead', + action_id=27, + order_by=['status'] + ) + + self.assertIn('status', result) + + +class StatisticSubIdsTestCase(BaseTestCase): + + def test_get_statistic_sub_ids_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticSubIds.URL, subid_number='', params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'subid1': '123567', + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticSubIds.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + subid1='123567' + ) + + self.assertIn('status', result) + + +class StatisticSourcesTestCase(BaseTestCase): + + def test_get_statistic_sources_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticSources.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 22, + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + 
json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticSources.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=22 + ) + + self.assertIn('status', result) + + +class StatisticKeywordsTestCase(BaseTestCase): + + def test_get_statistic_keywords_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(StatisticKeywords.URL, params={ + 'date_start': '01.01.2010', + 'date_end': '01.02.2010', + 'website': 10, + 'campaign': 20, + 'source': 'g', + 'order_by': ['cr', 'ecpc'], + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + result = self.client.StatisticKeywords.get( + date_start='01.01.2010', + date_end='01.02.2010', + website=10, + campaign=20, + source='g', + order_by=['cr', 'ecpc'] + ) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_tickets.py b/admitad/tests/test_tickets.py new file mode 100644 index 0000000..e4d3892 --- /dev/null +++ b/admitad/tests/test_tickets.py @@ -0,0 +1,89 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Tickets, TicketsManager +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET +from admitad.tests.base import BaseTestCase + + +class TicketsTestCase(BaseTestCase): + + def test_get_tickets_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Tickets.URL, params={ + 'status': 1, + 'date_start': '01.01.2010', + 'date_end': '01.01.2020', + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Tickets.get( + status=1, + date_start='01.01.2010', + date_end='01.01.2020' + ) + + self.assertIn('status', result) + + def 
test_get_single_ticket_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Tickets.SINGLE_URL, ticket_id=22), + match_querystring=True, + json={'status': 'ok'}, + status=200 + ) + + result = self.client.Tickets.getOne(22) + + self.assertIn('status', result) + + +class ManageTicketsTestCase(BaseTestCase): + + def test_create(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(TicketsManager.CREATE_URL), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.TicketsManager.create( + subject='foo', + text='bar', + campaign=90, + category=20, + priority=2, + ) + + self.assertIn('status', result) + + def test_commenting(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(TicketsManager.COMMENT_URL, ticket_id=276), + json={'status': 'ok'}, + status=200 + ) + + result = self.client.TicketsManager.comment(276, text='comment text') + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_transport.py b/admitad/tests/test_transport.py new file mode 100644 index 0000000..4c83a31 --- /dev/null +++ b/admitad/tests/test_transport.py @@ -0,0 +1,300 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +from datetime import datetime + +import responses + +from admitad.transport import oauth_client_authorization, get_credentials, build_headers, \ + prepare_request_data, api_request, oauth_refresh_access_token, HttpTransport +from admitad.constants import DEFAULT_REQUEST_TIMEOUT, DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ + BASE_URL, TOKEN_URL +from admitad.exceptions import HttpException +from admitad.tests.base import BaseTestCase + + +class BaseTransportTestCase(BaseTestCase): + + def test_get_credentials(self): + self.assertEqual(get_credentials('foobarbaz', '123456789'), 'Zm9vYmFyYmF6OjEyMzQ1Njc4OQ==') + + def test_build_headers(self): + 
self.assertDictEqual(build_headers('foobarbaz', user_agent='test_bot'), { + 'Authorization': 'Bearer foobarbaz', + 'Connection': 'Keep-Alive', + 'User-Agent': 'test_bot', + }) + + def test_prepare_request_data(self): + data = prepare_request_data({'foo': 42}, None, 'GET', timeout=10) + + self.assertDictEqual(data, { + 'headers': {}, + 'timeout': 10, + 'verify': False, + 'allow_redirects': True, + 'params': {'foo': 42} + }) + + data = prepare_request_data({'foo': 42}, None, 'POST') + + self.assertDictEqual(data, { + 'headers': {}, + 'timeout': DEFAULT_REQUEST_TIMEOUT, + 'verify': False, + 'allow_redirects': True, + 'data': {'foo': 42} + }) + + data = prepare_request_data({'foo': [None, None, 11]}, None, 'GET') + + self.assertDictEqual(data, { + 'headers': {}, + 'timeout': DEFAULT_REQUEST_TIMEOUT, + 'verify': False, + 'allow_redirects': True, + 'params': {'foo': [11]} + }) + + def test_api_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + 'http://example.com/', + json={ + 'status': 'ok' + }, + status=200 + ) + + result = api_request('http://example.com/') + + self.assertIn('status', result) + self.assertEqual('ok', result['status']) + + def test_api_request_404(self): + with self.assertRaises(HttpException): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + 'http://example.com/', + json={}, + status=400 + ) + + api_request('http://example.com/') + + def test_api_request_get(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + 'http://example.com/?bar=1&baz=0', + match_querystring=True, + json={ + 'success': 'ok' + }, + status=200 + ) + + result = api_request('http://example.com/', data={ + 'foo': [None], + 'bar': 1, + 'baz': 0 + }) + + self.assertIn('success', result) + + def test_oauth_refresh_access_token(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + TOKEN_URL, + json={ + 'access_token': 'access_token', + 'expires_in': '604800', + 'refresh_token': 'refresh', + 
'token_type': 'bearer', + 'username': 'username', + 'first_name': 'first_name', + 'last_name': 'second_name', + 'language': 'en', + }, + status=200 + ) + + result = oauth_refresh_access_token({ + 'client_id': 'client_id', + 'client_secret': 'secret', + 'refresh_token': 'r_token', + }) + + self.assertIn('access_token', result) + self.assertIn('expires_in', result) + self.assertIn('refresh_token', result) + self.assertIn('username', result) + self.assertIn('first_name', result) + self.assertIn('last_name', result) + self.assertIn('language', result) + + def test_oauth_client_authorization(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + TOKEN_URL, + json={ + 'access_token': 'access_token', + 'expires_in': '604800', + 'refresh_token': 'refresh', + 'token_type': 'bearer', + 'username': 'username', + 'first_name': 'first_name', + 'last_name': 'second_name', + 'language': 'en', + 'scope': 'pricate_data', + }, + status=200 + ) + + result = oauth_client_authorization({ + 'client_id': 'client_id', + 'client_secret': 'secret', + 'scopes': 'private_data', + }) + + self.assertIn('access_token', result) + self.assertIn('expires_in', result) + self.assertIn('refresh_token', result) + self.assertIn('username', result) + self.assertIn('first_name', result) + self.assertIn('last_name', result) + self.assertIn('language', result) + + +class TransportTestCase(BaseTestCase): + + def test_set_default_pagination(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'limit': DEFAULT_PAGINATION_LIMIT, + 'offset': DEFAULT_PAGINATION_OFFSET + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_pagination() \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_pagination(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'limit': 120, + 
'offset': 100 + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_pagination(limit=120, offset=100) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_ordering(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'order_by': 'name' + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_ordering(ordering={ + 'order_by': 'name', + 'available': ['name'] + }) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_multiple_ordering(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'order_by': ['name', '-date_updated'] + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_ordering(ordering={ + 'order_by': [None, 'name', '-date_updated'], + 'available': ['name', 'date_updated'] + }) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + def test_set_filtering(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(BASE_URL, params={ + 'name': 'FOOBARBAZ', + 'foo': 42, + 'date_start': '01.01.2020', + }), + match_querystring=True, + json={ + 'status': 'ok' + }, + status=200 + ) + + result = HttpTransport('access_token').get() \ + .set_filtering(filtering={ + 'filter_by': { + 'name': 'foobarbaz', + 'foo': 42, + 'date_start': datetime(2020, 1, 1), + 'some': 12, + }, + 'available': { + 'name': lambda x: x.upper(), + 'foo': lambda x: x, + 'date_start': lambda x: x.strftime('%d.%m.%Y'), + } + }) \ + .request(url=BASE_URL) + + self.assertIn('status', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/tests/test_websites.py b/admitad/tests/test_websites.py new file mode 100644 index 
0000000..c95a3d9 --- /dev/null +++ b/admitad/tests/test_websites.py @@ -0,0 +1,268 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import unittest +import responses + +from admitad.items import Websites, WebsitesManage +from admitad.tests.base import BaseTestCase + + +class WebsitesTestCase(BaseTestCase): + + def test_get_websites_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Websites.URL, params={ + 'limit': 1, + 'offset': 2 + }), + match_querystring=True, + json={ + 'results': [{ + 'id': 4, + 'status': 'active', + 'kind': 'website', + 'name': 'FooName', + 'categories': [1, 2], + 'adservice': None, + 'creation_date': '2010-04-17T21:54:45', + 'description': '', + 'is_old': True, + 'mailing_targeting': False, + 'regions': ['RU'], + 'site_url': 'https://foo.bar/', + 'validation_passed': False, + 'verification_code': '11c0sd4d14', + 'atnd_hits': 122, + 'atnd_visits': 10, + }], + '_meta': { + 'limit': 1, + 'offset': 2, + 'count': 9, + } + }, + status=200 + ) + + result = self.client.Websites.get(limit=1, offset=2) + + self.assertEqual(len(result['results']), 1) + self.assertIn('count', result['_meta']) + for item in result['results']: + self.assertIn('id', item) + self.assertIn('kind', item) + self.assertIn('status', item) + self.assertIn('name', item) + self.assertIn('categories', item) + self.assertIn('adservice', item) + self.assertIn('creation_date', item) + self.assertIn('description', item) + self.assertIn('is_old', item) + self.assertIn('mailing_targeting', item) + self.assertIn('regions', item) + self.assertIn('site_url', item) + self.assertIn('validation_passed', item) + self.assertIn('verification_code', item) + self.assertIn('atnd_hits', item) + self.assertIn('atnd_visits', item) + + def test_get_websites_request_with_id(self): + with responses.RequestsMock() as resp: + resp.add( + resp.GET, + self.prepare_url(Websites.SINGLE_URL, website_id=4), + json={ + 'id': 4, + 'status': 'active', + 
'kind': 'website', + 'name': 'FooName', + 'categories': [{ + 'id': 1, + 'language': 'en', + 'name': 'Cat1', + 'parent': None + }, { + 'id': 2, + 'language': 'en', + 'name': 'Cat2', + 'parent': None + }], + 'adservice': None, + 'creation_date': '2010-04-17T21:54:45', + 'description': '', + 'is_old': True, + 'mailing_targeting': False, + 'regions': ['RU'], + 'site_url': 'https://foo.bar/', + 'validation_passed': False, + 'verification_code': '11c0sd4d14', + 'atnd_hits': 122, + 'atnd_visits': 10, + }, + status=200 + ) + + result = self.client.Websites.getOne(4) + + self.assertIn('id', result) + self.assertIn('kind', result) + self.assertIn('status', result) + self.assertIn('name', result) + self.assertIn('categories', result) + self.assertIn('adservice', result) + self.assertIn('creation_date', result) + self.assertIn('description', result) + self.assertIn('is_old', result) + self.assertIn('mailing_targeting', result) + self.assertIn('regions', result) + self.assertIn('site_url', result) + self.assertIn('validation_passed', result) + self.assertIn('verification_code', result) + self.assertIn('atnd_hits', result) + self.assertIn('atnd_visits', result) + + +class WebsitesManageTestCase(BaseTestCase): + + def test_create_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.CREATE_URL), + match_querystring=True, + json={ + 'id': 42, + 'status': 'new', + 'kind': 'website', + 'name': 'FooBar', + 'categories': [{ + 'id': 1, + 'language': 'en', + 'name': 'Cat1', + 'parent': None + }, { + 'id': 2, + 'language': 'en', + 'name': 'Cat2', + 'parent': None + }], + 'adservice': None, + 'creation_date': '2016-10-10T11:54:45', + 'description': 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry\'s standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. 
It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.', + 'is_old': False, + 'mailing_targeting': True, + 'regions': ['RU'], + 'site_url': 'https://foobar.bar/', + 'validation_passed': False, + 'verification_code': '244a5d4a14', + 'atnd_hits': 500, + 'atnd_visits': 100, + }, + status=200 + ) + + result = self.client.WebsitesManage.create( + name='FooBar', + kind='website', + language='en', + site_url='https://foobar.baz/', + description='Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry\'s standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. 
It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.', + categories=[1, 2], + regions=['RU'], + atnd_visits=500, + atnd_hits=100, + mailing_targeting=True + ) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('status', result) + self.assertIn('kind', result) + self.assertIn('verification_code', result) + + def test_update_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.UPDATE_URL, website_id=42), + json={ + 'id': 42, + 'status': 'new', + 'kind': 'website', + 'name': 'FooBarBaz', + 'categories': [{ + 'id': 1, + 'language': 'en', + 'name': 'Cat1', + 'parent': None + }, { + 'id': 2, + 'language': 'en', + 'name': 'Cat2', + 'parent': None + }], + 'adservice': None, + 'creation_date': '2016-10-10T11:54:45', + 'description': 'Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry\'s standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. 
It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.', + 'is_old': False, + 'mailing_targeting': True, + 'regions': ['RU'], + 'site_url': 'https://foobar.bar/', + 'validation_passed': False, + 'verification_code': '244a5d4a14', + 'atnd_hits': 1000, + 'atnd_visits': 100, + }, + status=200 + ) + + result = self.client.WebsitesManage.update( + 42, + name='FooBarBaz', + atnd_visits=1000, + ) + + self.assertIn('id', result) + self.assertIn('name', result) + self.assertIn('atnd_visits', result) + + def test_verify_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.VERIFY_URL, website_id=42), + json={ + 'message': 'Message', + 'success': 'Accepted' + }, + status=200 + ) + + result = self.client.WebsitesManage.verify(42) + + self.assertIn('message', result) + self.assertIn('success', result) + + def test_delete_website_request(self): + with responses.RequestsMock() as resp: + resp.add( + resp.POST, + self.prepare_url(WebsitesManage.DELETE_URL, website_id=42), + json={ + 'message': 'Message', + 'success': 'Deleted' + }, + status=200 + ) + + result = self.client.WebsitesManage.delete(42) + + self.assertIn('message', result) + self.assertIn('success', result) + + +if __name__ == '__main__': + unittest.main() diff --git a/admitad/transport.py b/admitad/transport.py new file mode 100644 index 0000000..bb3d1de --- /dev/null +++ b/admitad/transport.py @@ -0,0 +1,278 @@ +# coding: utf-8 +from __future__ import unicode_literals + +import json +import logging +from base64 import b64encode + +import requests + +from admitad.constants import DEFAULT_PAGINATION_LIMIT, DEFAULT_PAGINATION_OFFSET, \ + DEFAULT_REQUEST_TIMEOUT, MAX_PAGINATION_LIMIT, TOKEN_URL +from admitad.exceptions import HttpException, ConnectionException, JsonException + +LOG = 
logging.getLogger(__file__) +LOG.addHandler(logging.StreamHandler()) + + +def to_json(content): + try: + return json.loads(content) + except (TypeError, ValueError): + return content + + +def debug_log(value, debug=True): + if debug: + LOG.setLevel(logging.DEBUG) + LOG.debug(value) + else: + LOG.setLevel(logging.NOTSET) + + +def get_credentials(client_id, client_secret): + return b64encode( + ('%s:%s' % (client_id, client_secret)).encode('utf-8') + ).decode('utf-8') + + +def build_headers(access_token, user_agent=None): + headers = { + 'Authorization': 'Bearer %s' % access_token, + 'Connection': 'Keep-Alive', + } + + if user_agent: + headers['User-Agent'] = user_agent + return headers + + +def prepare_data(data=None): + if data: + new_data = {} + for key, value in data.items(): + if isinstance(value, (list, tuple, set)): + new_data[key] = [item for item in value if item is not None] + else: + new_data[key] = value if value is not None else None + return new_data + return data + + +def prepare_request_data(data=None, headers=None, method='GET', + timeout=None, ssl_verify=False): + kwargs = { + 'headers': headers if headers is not None else {}, + 'timeout': timeout if timeout is not None else DEFAULT_REQUEST_TIMEOUT, + 'verify': ssl_verify, + 'allow_redirects': True, + } + + prepared_data = prepare_data(data) + + if method in ['POST', 'PUT']: + kwargs['data'] = prepared_data + if method in ['GET', 'DELETE']: + kwargs['params'] = prepared_data + + return kwargs + + +def api_request(url, data=None, headers=None, method='GET', + files=None, timeout=None, ssl_verify=True, debug=False): + kwargs = prepare_request_data(data=data, headers=headers, method=method, + timeout=timeout, ssl_verify=ssl_verify) + status_code = 500 + content = '' + try: + response = requests.request(method, url, files=files, **kwargs) + debug_log('Request url: %s' % response.url, debug) + # if method == 'POST': + # debug_log('Request body: %s' % response.request.body, debug) + status_code = 
response.status_code + content = response.content + if status_code >= 400: + response.raise_for_status() + except requests.HTTPError as err: + raise HttpException(status_code, to_json(content), err) + except requests.RequestException as err: + raise ConnectionException(err) + except (ValueError, TypeError) as err: + raise JsonException(err) + return response.json() + + +def oauth_refresh_access_token(data): + """ + refresh an access token. Returns dictionary with new access_token. + data['access-token'] + The function parameter should be a dictionary with next structure: + data = { + 'refresh_token': '', + 'client_secret': '', + 'client_id': '' + } + """ + refresh_token = data['refresh_token'] + client_id = data['client_id'] + client_secret = data['client_secret'] + params = { + 'grant_type': 'refresh_token', + 'client_id': client_id, + 'client_secret': client_secret, + 'refresh_token': refresh_token + } + headers = {'Content-Type': 'application/x-www-form-urlencoded'} + return api_request(TOKEN_URL, method='POST', data=params, headers=headers) + + +def oauth_client_authorization(data): + """ + OAuth2 client authorization. 
+ Used to get an access_token with the oauth client credentials + The function parameter should be a dictionary with next structure: + data = { + 'client_secret': '', + 'client_id': '' + 'scopes': '', + } + """ + client_id = data['client_id'] + client_secret = data['client_secret'] + params = { + 'grant_type': 'client_credentials', + 'client_id': client_id, + 'scope': data['scopes'] + } + credentials = get_credentials(client_id, client_secret) + headers = { + 'Content-Type': 'application/x-www-form-urlencoded', + 'Authorization': 'Basic %s' % credentials + } + return api_request(TOKEN_URL, method='POST', data=params, headers=headers) + + +class HttpTransport(object): + + SUPPORTED_METHODS = ('GET', 'POST', 'DELETE', 'PUT') + + def __init__(self, access_token, user_agent=None, debug=False): + self._headers = build_headers(access_token, user_agent=user_agent) + self._method = 'GET' + self._files = None + self._data = None + self._url = None + self._debug = debug + + def set_method(self, method): + if method in self.SUPPORTED_METHODS: + self._method = method + else: + raise AttributeError('This http method "%s" is not supported' % method) + # here we should clean data + return self.clean_data() + + def get(self): + return self.set_method('GET') + + def post(self): + return self.set_method('POST') + + def put(self): + return self.set_method('PUT') + + def delete(self): + return self.set_method('DELETE') + + def set_debug(self, debug): + self._debug = debug + return self + + def set_url(self, url, **kwargs): + self._url = url % kwargs + return self + + def set_data(self, data): + self._data = data + return self + + def clean_data(self): + self._data = None + return self + + def update_data(self, values): + if self._data is None: + self._data = {} + self._data.update(values) + return self + + def set_files(self, files): + self._files = files + return self + + def set_pagination(self, **kwargs): + limit = kwargs.get('limit', DEFAULT_PAGINATION_LIMIT) + offset = 
kwargs.get('offset', DEFAULT_PAGINATION_OFFSET) + + data = { + 'limit': limit if 0 < limit <= MAX_PAGINATION_LIMIT else DEFAULT_PAGINATION_LIMIT, + 'offset': offset if offset > 0 else DEFAULT_PAGINATION_OFFSET, + } + + return self.update_data(data) + + def set_ordering(self, ordering): + order_by = ordering.get('order_by', []) + available = ordering.get('available', []) + + if not isinstance(order_by, (list, tuple, set)): + order_by = [order_by] + + data = { + 'order_by': [item for item in order_by if item is not None and + (item[1:] if item[0] == '-' else item) in available] + } + + return self.update_data(data) + + def set_filtering(self, filtering): + filter_by = filtering.get('filter_by', {}) + available = filtering.get('available', {}) + + data = {key: available[key](value) for key, value in filter_by.items() if key in available} + + return self.update_data(data) + + def request(self, **kwargs): + if 'url' in kwargs: + self.set_url(kwargs.pop('url'), **kwargs) + if 'debug' in kwargs: + self.set_debug(kwargs.pop('debug')) + if not self._url: + raise AttributeError( + 'Absent url parameter. Use set_url method or pass ' + 'url parameter in this method.' 
+ ) + + requests_kwargs = { + 'method': self._method, + 'headers': self._headers, + 'data': self._data, + 'debug': self._debug, + 'files': self._files, + } + response = HttpTransport.api_request(self._url, **requests_kwargs) + handler = kwargs.get('handler', self._handle_response) + + return handler(response) + + @staticmethod + def api_request(url, **kwargs): + return api_request(url, **kwargs) + + @staticmethod + def _handle_response(response): + return response + + def __call__(self, **kwargs): + return self.request(**kwargs) diff --git a/ez_setup.py b/ez_setup.py deleted file mode 100644 index 72d35a5..0000000 --- a/ez_setup.py +++ /dev/null @@ -1,382 +0,0 @@ -#!python -"""Bootstrap setuptools installation - -If you want to use setuptools in your package's setup.py, just include this -file in the same directory with it, and add this to the top of your setup.py:: - - from ez_setup import use_setuptools - use_setuptools() - -If you want to require a specific version of setuptools, set a download -mirror, or use an alternate download directory, you can do so by supplying -the appropriate options to ``use_setuptools()``. - -This file can also be run as a script to install or upgrade setuptools. 
-""" -import os -import shutil -import sys -import tempfile -import tarfile -import optparse -import subprocess -import platform - -from distutils import log - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -DEFAULT_VERSION = "1.4" -DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" - -def _python_cmd(*args): - args = (sys.executable,) + args - return subprocess.call(args) == 0 - -def _check_call_py24(cmd, *args, **kwargs): - res = subprocess.call(cmd, *args, **kwargs) - class CalledProcessError(Exception): - pass - if not res == 0: - msg = "Command '%s' return non-zero exit status %d" % (cmd, res) - raise CalledProcessError(msg) -vars(subprocess).setdefault('check_call', _check_call_py24) - -def _install(tarball, install_args=()): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # installing - log.warn('Installing Setuptools') - if not _python_cmd('setup.py', 'install', *install_args): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - # exitcode will be 2 - return 2 - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - - -def _build_egg(egg, tarball, to_dir): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # building an egg - log.warn('Building a Setuptools egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - - finally: - 
os.chdir(old_wd) - shutil.rmtree(tmpdir) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -def _do_download(version, download_base, to_dir, download_delay): - egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' - % (version, sys.version_info[0], sys.version_info[1])) - if not os.path.exists(egg): - tarball = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, tarball, to_dir) - sys.path.insert(0, egg) - - # Remove previously-imported pkg_resources if present (see - # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). - if 'pkg_resources' in sys.modules: - del sys.modules['pkg_resources'] - - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, download_delay=15): - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - was_imported = 'pkg_resources' in sys.modules or \ - 'setuptools' in sys.modules - try: - import pkg_resources - except ImportError: - return _do_download(version, download_base, to_dir, download_delay) - try: - pkg_resources.require("setuptools>=" + version) - return - except pkg_resources.VersionConflict: - e = sys.exc_info()[1] - if was_imported: - sys.stderr.write( - "The required version of setuptools (>=%s) is not available,\n" - "and can't be installed while this script is running. Please\n" - "install a more recent version first, using\n" - "'easy_install -U setuptools'." - "\n\n(Currently using %r)\n" % (version, e.args[0])) - sys.exit(2) - else: - del pkg_resources, sys.modules['pkg_resources'] # reload ok - return _do_download(version, download_base, to_dir, - download_delay) - except pkg_resources.DistributionNotFound: - return _do_download(version, download_base, to_dir, - download_delay) - -def _clean_check(cmd, target): - """ - Run the command to download target. 
If the command fails, clean up before - re-raising the error. - """ - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - if os.access(target, os.F_OK): - os.unlink(target) - raise - -def download_file_powershell(url, target): - """ - Download the file at url to target using Powershell (which will validate - trust). Raise an exception if the command cannot complete. - """ - target = os.path.abspath(target) - cmd = [ - 'powershell', - '-Command', - "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), - ] - _clean_check(cmd, target) - -def has_powershell(): - if platform.system() != 'Windows': - return False - cmd = ['powershell', '-Command', 'echo test'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_powershell.viable = has_powershell - -def download_file_curl(url, target): - cmd = ['curl', url, '--silent', '--output', target] - _clean_check(cmd, target) - -def has_curl(): - cmd = ['curl', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_curl.viable = has_curl - -def download_file_wget(url, target): - cmd = ['wget', url, '--quiet', '--output-document', target] - _clean_check(cmd, target) - -def has_wget(): - cmd = ['wget', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_wget.viable = has_wget - -def download_file_insecure(url, target): - """ - Use Python to download the file, even though it cannot authenticate the - connection. 
- """ - try: - from urllib.request import urlopen - except ImportError: - from urllib2 import urlopen - src = dst = None - try: - src = urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. - data = src.read() - dst = open(target, "wb") - dst.write(data) - finally: - if src: - src.close() - if dst: - dst.close() - -download_file_insecure.viable = lambda: True - -def get_best_downloader(): - downloaders = [ - download_file_powershell, - download_file_curl, - download_file_wget, - download_file_insecure, - ] - - for dl in downloaders: - if dl.viable(): - return dl - -def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, delay=15, - downloader_factory=get_best_downloader): - """Download setuptools from a specified location and return its filename - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download - attempt. - - ``downloader_factory`` should be a function taking no arguments and - returning a function for downloading a URL to a target. - """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - tgz_name = "setuptools-%s.tar.gz" % version - url = download_base + tgz_name - saveto = os.path.join(to_dir, tgz_name) - if not os.path.exists(saveto): # Avoid repeated downloads - log.warn("Downloading %s", url) - downloader = downloader_factory() - downloader(url, saveto) - return os.path.realpath(saveto) - - -def _extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. 
`members' is optional and must be a subset of the - list returned by getmembers(). - """ - import copy - import operator - from tarfile import ExtractError - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. - directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 448 # decimal for oct 0700 - self.extract(tarinfo, path) - - # Reverse sort directories. - if sys.version_info < (2, 4): - def sorter(dir1, dir2): - return cmp(dir1.name, dir2.name) - directories.sort(sorter) - directories.reverse() - else: - directories.sort(key=operator.attrgetter('name'), reverse=True) - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError: - e = sys.exc_info()[1] - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - -def _build_install_args(options): - """ - Build the arguments to 'python setup.py install' on the setuptools package - """ - install_args = [] - if options.user_install: - if sys.version_info < (2, 6): - log.warn("--user requires Python 2.6 or later") - raise SystemExit(1) - install_args.append('--user') - return install_args - -def _parse_args(): - """ - Parse the command line for options - """ - parser = optparse.OptionParser() - parser.add_option( - '--user', dest='user_install', action='store_true', default=False, - help='install in user site package (requires Python 2.6 or later)') - parser.add_option( - '--download-base', dest='download_base', metavar="URL", - default=DEFAULT_URL, - help='alternative URL from where to download the setuptools package') - parser.add_option( - '--insecure', dest='downloader_factory', action='store_const', - const=lambda: download_file_insecure, default=get_best_downloader, - help='Use 
internal, non-validating downloader' - ) - options, args = parser.parse_args() - # positional arguments are ignored - return options - -def main(version=DEFAULT_VERSION): - """Install or upgrade setuptools and EasyInstall""" - options = _parse_args() - tarball = download_setuptools(download_base=options.download_base, - downloader_factory=options.downloader_factory) - return _install(tarball, _build_install_args(options)) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/pyadmitad/api.py b/pyadmitad/api.py deleted file mode 100644 index 9d69943..0000000 --- a/pyadmitad/api.py +++ /dev/null @@ -1,41 +0,0 @@ -from pyadmitad import client, transport - - -def get_authorizing_client(access_token, user_agent=None, debug=False): - """ - Creates a client using an access token. - """ - http_transport = transport.HttpTransport( - access_token, user_agent=user_agent, debug=debug) - return client.Client(http_transport) - - -def get_oauth_password_client( - client_id, client_secret, - username, password, scopes, user_agent=None, debug=False): - auth = transport.oauth_password_authorization({ - 'client_id': client_id, - 'client_secret': client_secret, - 'username': username, - 'password': password, - 'scopes': scopes - }) - return get_authorizing_client( - auth['access_token'], user_agent=user_agent, debug=debug) - - -def get_oauth_client_client( - client_id, client_secret, scopes, user_agent=None, debug=False): - auth = transport.oauth_client_authorization({ - 'client_id': client_id, - 'client_secret': client_secret, - 'scopes': scopes - }) - return get_authorizing_client( - auth['access_token'], user_agent=user_agent, debug=debug) - - -def get_oauth_client(access_token, user_agent=None): - return get_authorizing_client(access_token, user_agent=user_agent) - - diff --git a/pyadmitad/constants.py b/pyadmitad/constants.py deleted file mode 100644 index 8e0cd64..0000000 --- a/pyadmitad/constants.py +++ /dev/null @@ -1,15 +0,0 @@ -CURRENCIES = ('USD', 'RUB', 'EUR') -# 
API date-format -DATE_FORMAT = "%d.%m.%Y" -LONG_DATE_FORMAT = "%d.%m.%Y %H:%M:%S" - -# default values -DEFAULT_REQUEST_TIMEOUT = 60 -DEFAULT_LANGUAGE = 'ru' -MAX_PAGINATION_LIMIT = 200 -SUB_ID_MAX_LENGTH = 50 - -# urls -BASE_URL = 'https://api.admitad.com/' -AUTHORIZE_URL = '%s%s' % (BASE_URL, 'authorize/') -TOKEN_URL = '%s%s' % (BASE_URL, 'token/') diff --git a/pyadmitad/items/__init__.py b/pyadmitad/items/__init__.py deleted file mode 100644 index b08849e..0000000 --- a/pyadmitad/items/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from pyadmitad.items.me import * -from pyadmitad.items.auxiliary import * -from pyadmitad.items.coupons import * -from pyadmitad.items.websites import * -from pyadmitad.items.statistics import * -from pyadmitad.items.referrals import * -from pyadmitad.items.banners import * -from pyadmitad.items.campaigns import * -from pyadmitad.items.products import * -from pyadmitad.items.announcements import * -from pyadmitad.items.payments import * -from pyadmitad.items.money_transfers import * diff --git a/pyadmitad/items/announcements.py b/pyadmitad/items/announcements.py deleted file mode 100644 index 875b659..0000000 --- a/pyadmitad/items/announcements.py +++ /dev/null @@ -1,54 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Announcements', - 'AnnouncementsManage' -) - - -class Announcements(Item): - """ - List of announcements - - Required scope - "announcements" - """ - URL = Item.prepare_url('announcements') - SINGLE_URL = Item.prepare_url('announcements/%(id)s') - - def get(self, **kwargs): - """ - res = client.Announcements.get() - res = client.Announcements.get(limit=1, offset=2) - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - Here _id is an announcement id - - res = client.Announcements.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return 
self.transport.set_method('GET').request(**kwargs) - - -class AnnouncementsManage(Item): - """ - manage of announcements - - Required scope - "manage_announcements" - """ - DELETE_URL = Item.prepare_url('announcements/delete/%(id)s/') - - def delete(self, _id, **kwargs): - """ - Here _id is an announcement id - - res = client.AnnouncementsManage.delete(12) - """ - kwargs['url'] = self.DELETE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('POST').request(**kwargs) diff --git a/pyadmitad/items/auxiliary.py b/pyadmitad/items/auxiliary.py deleted file mode 100644 index 215199d..0000000 --- a/pyadmitad/items/auxiliary.py +++ /dev/null @@ -1,177 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'WebsiteTypes', - 'WebsiteRegions', - 'SystemLanguages', - 'SystemCurrencies', - 'AdvertiserServices', - 'CampaignCategories', -) - - -class WebsiteTypes(Item): - """ - List of websites types - - Required scope - "public_data" - """ - - URL = Item.prepare_url('websites/kinds') - - def get(self, **kwargs): - """ - res = client.WebsiteTypes.get() - res = client.WebsiteTypes.get(limit=2, offset=1) - res = client.WebsiteTypes.get(limit=2, offset=1, language='ru') - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - -class WebsiteRegions(Item): - """ - List of websites regions - - Required scope - "public_data" - """ - - URL = Item.prepare_url('websites/regions') - - def get(self, **kwargs): - """ - res = client.WebsiteRegions.get() - res = client.WebsiteRegions.get(limit=2, offset=1) - res = client.WebsiteRegions.get(limit=2, offset=1, language='ru') - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - -class SystemLanguages(Item): - """ - List of system languages - - Required scope - "public_data" - """ - - URL = Item.prepare_url('languages') - SINGLE_URL = Item.prepare_url('languages/%(code)s') - - 
def get(self, **kwargs): - """ - res = client.SystemLanguages.get() - res = client.SystemLanguages.get(limit=2, offset=1) - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getOne(self, code='ru'): - """ - res = client.SystemLanguages.getOne(code='ru') - """ - return self.transport.set_method('GET').request(url=self.SINGLE_URL, code=code) - - -class SystemCurrencies(Item): - """ - List of system currencies - - Required scope - "public_data" - """ - - URL = Item.prepare_url('currencies') - - def get(self, **kwargs): - """ - res = client.SystemCurrencies.get() - res = client.SystemCurrencies.get(limit=2, offset=1) - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - -class AdvertiserServices(Item): - """ - List of advertiser services - - Required scope - "public_data" - """ - - URL = Item.prepare_url('adservices') - SINGLE_URL = Item.prepare_url('adservices/%(id)s') - KIND_URL = Item.prepare_url('adservices/kind/%(kind)s') - KIND_SINGLE_URL = Item.prepare_url('adservices/%(id)s/kind/%(kind)s') - - def get(self, **kwargs): - """ - res = client.AdvertiserServices.get() - res = client.AdvertiserServices.get(limit=2, offset=1) - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.AdvertiserServices.getOne(_id=2) - res = client.AdvertiserServices.getOne(1) - """ - kwargs['id'] = self.sanitize_id(_id) - kwargs['url'] = self.SINGLE_URL - return self.transport.set_method('GET').request(**kwargs) - - def getForKind(self, kind=None, **kwargs): - """ - Returns advertiser services for website types - - res = client.AdvertiserServices.getForKind(kind='website') - res = client.AdvertiserServices.getForKind('website') - """ - kwargs['kind'] = self.sanitize_non_blank_value(kind, 'kind') - kwargs['url'] = self.KIND_URL - 
return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getForKindOne(self, _id, kind, **kwargs): - """ - Returns advertiser service for website types - - res = client.AdvertiserServices.getForKindOne(_id=2, kind='website') - res = client.AdvertiserServices.getForKindOne(2, 'website') - """ - kwargs['kind'] = self.sanitize_non_blank_value(kind, 'kind') - kwargs['id'] = self.sanitize_id(_id) - kwargs['url'] = self.KIND_SINGLE_URL - return self.transport.set_method('GET').request(**kwargs) - - -class CampaignCategories(Item): - """ - List of campaigns categories - - Required scope - "public_data" - """ - - ORDERING = ('name',) - - URL = Item.prepare_url('categories') - SINGLE_URL = Item.prepare_url('categories/%(id)s') - - def get(self, **kwargs): - """ - res = client.CampaignCategories.get() - res = client.CampaignCategories.get(limit=2, offset=1) - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.CampaignCategories.getOne(_id=2) - res = client.CampaignCategories.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) diff --git a/pyadmitad/items/banners.py b/pyadmitad/items/banners.py deleted file mode 100644 index 5755819..0000000 --- a/pyadmitad/items/banners.py +++ /dev/null @@ -1,54 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Banners', - 'BannersForWebsite', -) - - -class Banners(Item): - """ - List of banners - - Required scope - "banners" - """ - - URL = Item.prepare_url('banners/%(id)s') - - def get(self, _id, **kwargs): - """ - Here _id is an id of advertising campaign - - res = client.Banners.get(_id=2) - res = client.Banners.get(2) - res = client.Banners.get(2, limit=2) - - """ - kwargs['url'] = self.URL - 
kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - -class BannersForWebsite(Item): - """ - List of banners for the website - - Required scope - "banners_for_website" - """ - - URL = Item.prepare_url('banners/%(id)s/website/%(w_id)s') - - def get(self, _id, w_id, **kwargs): - """ - Here _id is an id of advertising campaign and - w_id is a id of website - - res = client.BannersForWebsite.get(_id=2, w_id=3) - res = client.BannersForWebsite.get(2, 3) - res = client.BannersForWebsite.get(2, 3, limit=5) - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['w_id'] = self.sanitize_id(w_id) - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) diff --git a/pyadmitad/items/base.py b/pyadmitad/items/base.py deleted file mode 100644 index a8e89d4..0000000 --- a/pyadmitad/items/base.py +++ /dev/null @@ -1,126 +0,0 @@ -from datetime import datetime, date -from pyadmitad.constants import DATE_FORMAT, BASE_URL,\ - CURRENCIES, LONG_DATE_FORMAT - - -class Item(object): - - def __init__(self, transport): - self.transport = transport - self.transport.clean_data() - - def sanitize_id(self, _id, name='_id'): - return self.sanitize_integer_value(_id, name) - - @staticmethod - def sanitize_non_blank_value(value, name): - if not value: - raise ValueError("Invalid non-blank value '%s': %s" % (name, value)) - return value - - @staticmethod - def sanitize_string_value( - value, name, max_length=None, min_length=None, blank=False): - if not value: - if not blank: - raise ValueError( - "Invalid string value '%s': %s. Cannot be blank." % - (name, value)) - return value - if max_length and len(value) > max_length: - raise ValueError( - "Invalid string value '%s': %s. Max length: %s" % - (name, value, max_length)) - if min_length and len(value) < min_length: - raise ValueError( - "Invalid string value '%s': %s. 
Min length: %s" % - (name, value, min_length)) - return value - - @staticmethod - def sanitize_integer_value(value, name, blank=False): - if not value: - if not blank: - raise ValueError("Blank integer value '%s': %s" % (name, value)) - return value - if type(value) == int: - return str(value) - elif type(value) == str: - if value.isdigit(): - return value - raise ValueError("Invalid integer value '%s': %s" % (name, value)) - - @staticmethod - def sanitize_float_value(value, name, blank=False): - if not value: - if not blank: - raise ValueError("Blank float value '%s': %s" % (name, value)) - return value - if type(value) in (float, int): - return str(value) - elif type(value) == str: - try: - float(value) - return value - except ValueError: - raise ValueError("Invalid float value '%s': %s" % (name, value)) - raise ValueError("Invalid float value '%s': %s" % (name, value)) - - @staticmethod - def sanitize_integer_array(values, name, blank=False): - if not values: - if not blank: - raise ValueError( - "Blank integer values '%s': %s" % (name, values)) - return values - return [Item.sanitize_integer_value(x, name, blank=blank) - for x in values] - - @staticmethod - def sanitize_string_array( - values, name, max_length=None, min_length=None, blank=False): - if not values: - if not blank: - raise ValueError( - "Blank string values '%s': %s" % (name, values)) - return values - return [Item.sanitize_string_value( - x, name, max_length=max_length, min_length=min_length, blank=blank) - for x in values] - - @staticmethod - def sanitize_currency(value, blank=True): - if not value: - if not blank: - raise ValueError( - "Blank currency value: %s" % value) - return value - if value not in CURRENCIES: - raise ValueError( - "Invalid currency value: %s" % value) - return value - - @staticmethod - def check_date(dt): - s = datetime.strptime(dt, DATE_FORMAT).date() - if s > date.today(): - s = date.today() - return s.strftime(DATE_FORMAT) - - @staticmethod - def check_long_date(dt): - 
s = datetime.strptime(dt, LONG_DATE_FORMAT) - if s > datetime.now(): - s = datetime.now() - return s.strftime(LONG_DATE_FORMAT) - - @staticmethod - def prepare_url(path): - url = '%s%s' % (BASE_URL, path) - if not url.endswith('/'): - url += '/' - return url - - @staticmethod - def to_unicode(text): - return u'%s' % text diff --git a/pyadmitad/items/campaigns.py b/pyadmitad/items/campaigns.py deleted file mode 100644 index 3b548f8..0000000 --- a/pyadmitad/items/campaigns.py +++ /dev/null @@ -1,109 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Campaigns', - 'CampaignsForWebsite', - 'CampaignsManage', -) - - -class Campaigns(Item): - """ - List of advertising campaigns - - Required scope - "advcampaigns" - """ - URL = Item.prepare_url('advcampaigns') - SINGLE_URL = Item.prepare_url('advcampaigns/%(id)s') - - def get(self, **kwargs): - """ - res = client.Campaigns.get() - res = client.Campaigns.get(limit=2) - - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - Here _id is an a campaign id - - res = client.Campaigns.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class CampaignsForWebsite(Item): - """ - List of advertising campaigns for a website - - Required scope - "advcampaigns_for_website" - """ - URL = Item.prepare_url('advcampaigns/website/%(id)s') - SINGLE_URL = Item.prepare_url('advcampaigns/%(c_id)s/website/%(id)s') - - def get(self, _id, **kwargs): - """ - Here _id is a website id - - res = client.CampaignsForWebsite.get(22) - res = client.CampaignsForWebsite.get(limit=2) - - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getOne(self, _id, c_id, **kwargs): - """ - Here _id is a website id and c_id is a campaign 
id - - res = client.CampaignsForWebsite.getOne(6, 22) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.set_method('GET').request(**kwargs) - - -class CampaignsManage(Item): - """ - Manage an advertising campaign - - Required scope - "manage_advcampaigns" - """ - CONNECT_URL = Item.prepare_url('advcampaigns/%(c_id)s/attach/%(w_id)s') - DISCONNECT_URL = Item.prepare_url('advcampaigns/%(c_id)s/detach/%(w_id)s') - - def _request(self, c_id, w_id, **kwargs): - kwargs['c_id'] = self.sanitize_id(c_id) - kwargs['w_id'] = self.sanitize_id(w_id) - return self.transport.set_method('POST').request(**kwargs) - - def connect(self, c_id, w_id, **kwargs): - """ - Connect an advertising campaign for a website - Here w_id is a website id and c_id is a campaign id - - res = client.CampaignsManage.connect(6, 22) - res = client.CampaignsManage.connect(c_id=6, w_id=22) - - """ - kwargs['url'] = self.CONNECT_URL - return self._request(c_id, w_id, **kwargs) - - def disconnect(self, c_id, w_id, **kwargs): - """ - Disconnect an advertising campaign from a website - Here w_id is a website id and c_id is a campaign id - - res = client.CampaignsManage.disconnect(6, 22) - res = client.CampaignsManage.disconnect(c_id=6, w_id=22) - - """ - kwargs['url'] = self.DISCONNECT_URL - return self._request(c_id, w_id, **kwargs) diff --git a/pyadmitad/items/coupons.py b/pyadmitad/items/coupons.py deleted file mode 100644 index 9cc0cff..0000000 --- a/pyadmitad/items/coupons.py +++ /dev/null @@ -1,106 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Coupons', - 'CouponsForWebsite', -) - - -class CouponsBase(Item): - - ORDERING = ('name', 'date_start', 'date_end', 'rating',) - FILTERING = { - 'campaign': int, - 'campaign_category': int, - 'category': int, - 'type': int - } - - -class Coupons(CouponsBase): - """ - List of coupons - - Required scope - "coupons" - """ - - URL = 
Item.prepare_url('coupons') - SINGLE_URL = Item.prepare_url('coupons/%(id)s') - - def get(self, **kwargs): - """ - res = client.Coupons.get() - res = client.Coupons.get(order_by=date_start) - res = client.Coupons.get(order_by=-date_end) - res = client.Coupons.get(campaign=1, category=2) - - If you want to filter by many values of the same key: - on example - campaign=1, campaign=2: - - use campaign=[1, 2] - - res = client.Coupons.get(campaign=[1, 2], category=2) - - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_ordering(**kwargs).set_filtering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.Coupons.getOne(_id=2) - res = client.Coupons.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class CouponsForWebsite(CouponsBase): - """ - List of the website coupons - - Required scope - "coupons_for_website" - """ - - URL = Item.prepare_url('coupons/website/%(id)s') - SINGLE_URL = Item.prepare_url('coupons/%(c_id)s/website/%(id)s') - - def get(self, _id, **kwargs): - """ - Here id is a websites id - - res = client.CouponsForWebsite.get(_id=2) - res = client.CouponsForWebsite.get(2) - res = client.CouponsForWebsite.get(2, order_by=date_start) - res = client.CouponsForWebsite.get(2, campaign=1, category=2) - - If you want to filter by many values of the same key: - on example - campaign=1, campaign=2: - - use campaign=[1, 2] - - res = client.CouponsForWebsite.get(2, campaign=[1, 2], category=2) - - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['allowed_ordering'] = self.ORDERING - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - 
set_ordering(**kwargs).set_filtering(**kwargs).request(**kwargs) - - def getOne(self, _id, c_id, **kwargs): - """ - Here id is a websites id and c_id is a coupon id - - res = client.CouponsForWebsite.getOne(_id=2, c_id=1) - res = client.CouponsForWebsite.getOne(2, 1) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.set_method('GET').request(**kwargs) diff --git a/pyadmitad/items/me.py b/pyadmitad/items/me.py deleted file mode 100644 index 955fabd..0000000 --- a/pyadmitad/items/me.py +++ /dev/null @@ -1,48 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Me', - 'Balance', -) - - -class Me(Item): - """ - Get private information - - Required scope - "private_data"|"private_data_email"|"private_data_phone" - """ - - def __call__(self, **kwargs): - return self.get(**kwargs) - - URL = Item.prepare_url('me') - - def get(self, **kwargs): - """ - res = client.Me.get() - res = client.Me.get(language='ru') - """ - kwargs['url'] = self.URL - return self.transport.set_method("GET").request(**kwargs) - - -class Balance(Item): - """ - Get balance information - - Required scope - "private_data_balance" - """ - - def __call__(self, **kwargs): - return self.get(**kwargs) - - URL = Item.prepare_url('me/balance') - - def get(self, **kwargs): - """ - res = client.Balance.get() - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_method("GET").request(**kwargs) diff --git a/pyadmitad/items/money_transfers.py b/pyadmitad/items/money_transfers.py deleted file mode 100644 index 99a5c60..0000000 --- a/pyadmitad/items/money_transfers.py +++ /dev/null @@ -1,88 +0,0 @@ -from copy import deepcopy -from pyadmitad.items.base import Item - - -__all__ = ( - 'MoneyTransfers', - 'MoneyTransfersManage', -) - - -class MoneyTransfersBase(Item): - - ORDERING = ('date_created',) - FILTERING = { - 'sender': Item.to_unicode, - 'recipient': Item.to_unicode, - 'currency': 
Item.to_unicode, - } - - -class MoneyTransfers(Item): - """ - List of webmaster money transfers - - Required scope - "webmaster_money_transfers" - """ - URL = Item.prepare_url('webmaster_money_transfers') - SINGLE_URL = Item.prepare_url('webmaster_money_transfer/%(id)s') - - def get(self, **kwargs): - """ - res = client.MoneyTransfers.get() - res = client.MoneyTransfers.get(limit=2) - - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').\ - set_pagination(**kwargs).set_filtering(**kwargs).\ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.MoneyTransfers.getOne(_id=2) - res = client.MoneyTransfers.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class MoneyTransfersManage(Item): - """ - Manage webmaster money transfers - - Required scope - "manage_webmaster_money_transfers" - """ - CREATE_FIELDS = { - 'comment': lambda x: Item.sanitize_string_value(x, 'comment'), - 'recipient': lambda x: Item.sanitize_string_value(x, 'recipient'), - 'currency': lambda x: Item.sanitize_currency(x, 'currency'), - 'sum': lambda x: Item.sanitize_float_value(x, 'sum') - } - - CREATE_URL = Item.prepare_url('webmaster_money_transfer/create') - - @staticmethod - def sanitize_fields(fields, **kwargs): - data = deepcopy(kwargs) - for field in fields: - data[field] = fields[field](data.get(field)) - return dict([(key, value) for (key, value) in data.items() if value]) - - def create(self, **kwargs): - """ - Create a webmaster money transfers - - res = client.MoneyTransfersManage.create( - sender='webmaster', - recipient='recipient', - sum=200, - currency='USD', - comment='comment') - - """ - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - return self.transport.set_method('POST').\ - set_data(data).request(**kwargs) diff --git a/pyadmitad/items/payments.py 
b/pyadmitad/items/payments.py deleted file mode 100644 index 8254bef..0000000 --- a/pyadmitad/items/payments.py +++ /dev/null @@ -1,83 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Payments', - 'PaymentsManage', -) - - -class Payments(Item): - """ - List of webmaster payments - - Required scope - "payments" - """ - URL = Item.prepare_url('payments') - SINGLE_URL = Item.prepare_url('payments/%(id)s') - - def get(self, **kwargs): - """ - res = client.Payments.get() - res = client.Payments.get(limit=2) - - """ - kwargs['url'] = self.URL - return self.transport.set_method('GET').set_pagination(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.Payments.getOne(_id=2) - res = client.Payments.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class PaymentsManage(Item): - """ - Manage payments - - Required scope - "manage_websites" - """ - - CREATE_URL = Item.prepare_url('payments/request/%(code)s') - CONFIRM_URL = Item.prepare_url('payments/confirm/%(id)s') - DELETE_URL = Item.prepare_url('payments/delete/%(id)s') - - def create(self, _code, **kwargs): - """ - Create a payment request. - _code is a code of currency - - res = client.PaymentsManage.create('USD') - - """ - kwargs['url'] = self.CREATE_URL - kwargs['code'] = self.sanitize_currency(_code) - return self.transport.set_method('POST').request(**kwargs) - - def confirm(self, _id, **kwargs): - """ - Confirm a payment request. - _id is a payment id. - - res = client.PaymentsManage.confirm(71) - - """ - kwargs['url'] = self.CONFIRM_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('POST').request(**kwargs) - - def delete(self, _id, **kwargs): - """ - Delete a payment request. - _id is a payment id. 
- - res = client.PaymentsManage.delete(71) - - """ - kwargs['url'] = self.DELETE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('POST').request(**kwargs) diff --git a/pyadmitad/items/products.py b/pyadmitad/items/products.py deleted file mode 100644 index ca0a738..0000000 --- a/pyadmitad/items/products.py +++ /dev/null @@ -1,156 +0,0 @@ -from pyadmitad.items.base import Item - -__all__ = ( - 'ProductCategories', - 'ProductVendors', - 'ProductCampaigns', - 'Products', -) - - -class ProductCategories(Item): - """ - List of products categories - - Required scope - "public_data" - """ - URL = Item.prepare_url('products/categories') - SINGLE_URL = Item.prepare_url('products/categories/%(id)s') - - ORDERING = ('name',) - - def get(self, **kwargs): - """ - res = client.ProductCategories.get() - res = client.ProductCategories.get(limit=1, order_by=-name) - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - Here _id is category id. - - res = client.ProductCategories.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class ProductVendors(Item): - """ - List of products vendors - - Required scope - "public_data" - """ - URL = Item.prepare_url('products/vendors') - SINGLE_URL = Item.prepare_url('products/vendors/%(id)s') - - ORDERING = ('name',) - - def get(self, **kwargs): - """ - res = client.ProductVendors.get() - res = client.ProductVendors.get(limit=1, order_by=-name) - """ - kwargs['url'] = self.URL - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs). \ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - Here _id is category id. 
- - res = client.ProductVendors.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class ProductCampaigns(Item): - """ - List of campaigns that have products - - Required scope - "products_for_website" - """ - URL = Item.prepare_url('products/advcampaigns/website/%(id)s') - SINGLE_URL = Item.prepare_url( - 'products/advcampaigns/%(c_id)s/website/%(id)s') - - ORDERING = ('name',) - - def get(self, _id, **kwargs): - """ - Here _id is website id. - - res = client.ProductCampaigns.get(22) - res = client.ProductCampaigns.get(22, limit=1, order_by=-name) - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs). \ - set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, c_id, **kwargs): - """ - Here _id is website id and c_id is campaign id - - res = client.ProductCampaigns.getOne(22, 6) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['c_id'] = self.sanitize_id(c_id) - return self.transport.set_method('GET').request(**kwargs) - - -class Products(Item): - """ - List of products - - Required scope - "products_for_website" - """ - URL = Item.prepare_url('products/website/%(id)s') - SINGLE_URL = Item.prepare_url('products/%(p_id)s/website/%(id)s') - - ORDERING = ('price', 'category', 'vendor', 'campaign', 'date_updated') - FILTERING = { - 'keyword': Item.to_unicode, - 'price_from': int, - 'price_to': int, - 'campaign': int, - 'category': int, - 'vendor': int - } - - def get(self, _id, **kwargs): - """ - Here _id is website id. 
- - res = client.Products.get(22) - res = client.Products.get(22, limit=1) - res = client.Products.get(22, limit=1, order_by=-price) - res = client.Products.get(22, price_from=1000) - """ - kwargs['url'] = self.URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['allowed_ordering'] = self.ORDERING - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).set_ordering(**kwargs).request(**kwargs) - - def getOne(self, _id, p_id, **kwargs): - """ - Here _id is website id and p_id is product id - - res = client.ProductCampaigns.getOne(22, 2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs['p_id'] = self.sanitize_id(p_id) - return self.transport.set_method('GET').request(**kwargs) diff --git a/pyadmitad/items/referrals.py b/pyadmitad/items/referrals.py deleted file mode 100644 index a071fb1..0000000 --- a/pyadmitad/items/referrals.py +++ /dev/null @@ -1,41 +0,0 @@ -from pyadmitad.items.base import Item - - -__all__ = ( - 'Referrals', -) - - -class Referrals(Item): - """ - List of referrals - - Required scope - "referrals" - """ - - URL = Item.prepare_url('referrals') - SINGLE_URL = Item.prepare_url('referrals/%(id)s') - - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date - } - - def get(self, **kwargs): - """ - res = client.Referrals.get() - res = client.Referrals.get(limit=2) - """ - kwargs['url'] = self.URL - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.Referrals.getOne(_id=2) - res = client.Referrals.getOne(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) diff --git a/pyadmitad/items/statistics.py b/pyadmitad/items/statistics.py deleted file mode 100644 index 
5e3db09..0000000 --- a/pyadmitad/items/statistics.py +++ /dev/null @@ -1,369 +0,0 @@ -from copy import copy -from pyadmitad.constants import SUB_ID_MAX_LENGTH -from pyadmitad.items.base import Item - - -__all__ = ( - 'StatisticWebsites', - 'StatisticCampaigns', - 'StatisticDays', - 'StatisticMonths', - 'StatisticActions', - 'StatisticSubIds', - 'StatisticSources', - 'StatisticKeywords', -) - - -class StatisticBase(Item): - - STATUSES = (1, 2, 3) - SOURCES = ('g', 'y') - ACTION_TYPES = ('lead', 'Lead') - - ORDERING = ( - 'action', - 'clicks', - 'cr', - 'ctr', - 'ecpc', - 'ecpm', - 'leads', - 'name', - 'payment_sum', - 'payment_sum_approved', - 'payment_sum_declined', - 'payment_sum_open', - 'sales', - 'views', - ) - - @staticmethod - def check_sub_id(sub_id): - return u'%s' % sub_id if len(sub_id) <= SUB_ID_MAX_LENGTH else None - - @staticmethod - def check_sources(source): - return source if source in StatisticBase.SOURCES else None, - - @staticmethod - def check_status(status): - return status if status in StatisticBase.STATUSES else None, - - @staticmethod - def check_actions_type(action_type): - return action_type if action_type\ - in StatisticBase.ACTION_TYPES else None, - - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, - 'subid': check_sub_id - } - - def get(self, url, **kwargs): - """Base GET method""" - kwargs['url'] = url - kwargs['allowed_filtering'] = self.FILTERING - kwargs['allowed_ordering'] = self.ORDERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).set_ordering(**kwargs).request(**kwargs) - - -class StatisticWebsites(StatisticBase): - """ - Statistics by websites - - Required scope - "statistics" - """ - - URL = Item.prepare_url('statistics/websites') - - def get(self, **kwargs): - """ - res = client.StatisticWebsites.get() - res = client.StatisticWebsites.get(website=1, campaign=1) - res = 
client.StatisticWebsites.get(subid="ADS778") - res = client.StatisticWebsites.get(limit=2) - res = client.StatisticWebsites.get(date_start='01.01.2013') - - """ - return super(StatisticWebsites, self).get(self.URL, **kwargs) - - -class StatisticCampaigns(StatisticBase): - """ - Statistics by campaigns - - Required scope - "statistics" - """ - - URL = Item.prepare_url('statistics/campaigns') - - def get(self, **kwargs): - """ - res = client.StatisticCampaigns.get() - res = client.StatisticCampaigns.get(website=1, campaign=1) - res = client.StatisticCampaigns.get(sub_id="ADS778") - res = client.StatisticCampaigns.get(limit=2) - res = client.StatisticCampaigns.get(date_start='01.01.2013') - - """ - return super(StatisticCampaigns, self).get(self.URL, **kwargs) - - -class StatisticDays(StatisticBase): - """ - Statistics by days - - Required scope - "statistics" - """ - - URL = Item.prepare_url('statistics/dates') - - def get(self, **kwargs): - """ - res = client.StatisticDays.get() - res = client.StatisticDays.get(website=1, campaign=1) - res = client.StatisticDays.get(sub_id="ADS778") - res = client.StatisticDays.get(limit=2) - res = client.StatisticDays.get(date_start='01.01.2013') - """ - return super(StatisticDays, self).get(self.URL, **kwargs) - - -class StatisticMonths(StatisticBase): - """ - Statistics by months - - Required scope - "statistics" - """ - - URL = Item.prepare_url('statistics/months') - - def get(self, **kwargs): - """ - res = client.StatisticMonths.get() - res = client.StatisticMonths.get(website=1, campaign=1) - res = client.StatisticMonths.get(sub_id="ADS778") - res = client.StatisticMonths.get(limit=2) - res = client.StatisticMonths.get(date_start='01.01.2013') - - """ - return super(StatisticMonths, self).get(self.URL, **kwargs) - - -class StatisticActions(StatisticBase): - """ - Statistics by actions - - Required scope - "statistics" - """ - - ORDERING = ( - 'action', - 'banner', - 'banner_id', - 'campaign', - 'cart', - 'click_date', - 
'conv_time', - 'datetime', - 'payment', - 'status', - 'subid', - 'subid1', - 'subid2', - 'subid3', - 'subid4', - 'website' - ) - - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'closing_date_start': Item.check_date, - 'closing_date_end': Item.check_date, - 'status_updated_start': Item.check_long_date, - 'status_updated_end': Item.check_long_date, - 'website': int, - 'campaign': int, - 'subid': StatisticBase.check_sub_id, - 'subid1': StatisticBase.check_sub_id, - 'subid2': StatisticBase.check_sub_id, - 'subid3': StatisticBase.check_sub_id, - 'subid4': StatisticBase.check_sub_id, - 'source': StatisticBase.check_sources, - 'status': StatisticBase.check_status, - 'keyword': Item.to_unicode, - 'action': Item.to_unicode, - 'action_type': StatisticBase.check_actions_type, - 'action_id': int - } - - URL = Item.prepare_url('statistics/actions') - - def get(self, **kwargs): - """ - res = client.StatisticActions.get() - res = client.StatisticActions.get(website=1, campaign=1) - res = client.StatisticActions.get(subid="ADS778") - res = client.StatisticActions.get(limit=2) - res = client.StatisticActions.get(date_start='01.01.2013') - - """ - return super(StatisticActions, self).get(self.URL, **kwargs) - - -class StatisticSubIds(StatisticBase): - """ - Statistics by sub-ids - - Required scope - "statistics" - """ - SUB_ID_NUMBERS = range(0, 5) - - ORDERING = ( - 'actions', - 'clicks', - 'cr', - 'ecpc', - 'leads', - 'payment_sum', - 'payment_sum_approved', - 'payment_sum_declined', - 'payment_sum_open', - 'sales' - ) - - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, - } - - URL = Item.prepare_url('statistics/sub_ids%s') - - def sanitize_sub_id_number(self, number): - if number not in self.SUB_ID_NUMBERS: - raise ValueError("Invalid subid number. 
'%s': %s" % ( - number, self.SUB_ID_NUMBERS)) - - def prepare_filtering(self, sub_id_number): - params = copy(self.FILTERING) - subid_params = dict([ - ('subid%s' % (val or ''), StatisticBase.check_sub_id) - for val in self.SUB_ID_NUMBERS if val != sub_id_number]) - params.update(subid_params) - return params - - def prepare_ordering(self, sub_id_number): - sub_id_name = 'subid%s' % (sub_id_number or '') - return self.ORDERING + (sub_id_name,) - - def get(self, sub_id_number=0, **kwargs): - """ - Here sub_id_number is subid number. - It is allowed from 0 to 5 excluding. - It just will send request to sub_ids, sub_ids1, sub_ids2, - sub_ids3, sub_ids4 urls correspondingly. - - res = client.StatisticSubIds.get() - res = client.StatisticSubIds.get(date_start='01.01.2013') - res = client.StatisticSubIds.get(subid="ADS778") - res = client.StatisticSubIds.get(subid1="ADS778", sub_id_number=2) - res = client.StatisticSubIds.get(limit=2) - - """ - self.sanitize_sub_id_number(sub_id_number) - kwargs['url'] = self.URL % (sub_id_number or '') - kwargs['allowed_filtering'] = self.prepare_filtering(sub_id_number) - kwargs['allowed_ordering'] = self.prepare_ordering(sub_id_number) - return self.transport.set_method('GET').set_pagination(**kwargs).\ - set_filtering(**kwargs).set_ordering(**kwargs).request(**kwargs) - - -class StatisticSources(StatisticBase): - """ - Statistics by sources - - Required scope - "statistics" - """ - - ORDERING = ( - 'actions', - 'clicks', - 'cr', - 'ecpc', - 'leads', - 'payment_sum', - 'payment_sum_approved', - 'payment_sum_declined', - 'payment_sum_open', - 'sales', - 'source', - ) - - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, - } - - URL = Item.prepare_url('statistics/sources') - - def get(self, **kwargs): - """ - res = client.StatisticSources.get() - res = client.StatisticSources.get(date_start='01.01.2013') - res = client.StatisticSources.get(limit=2) - - """ - return 
super(StatisticSources, self).get(self.URL, **kwargs) - - -class StatisticKeywords(StatisticBase): - """ - Statistics by keywords - - Required scope - "statistics" - """ - - ORDERING = ( - 'actions', - 'clicks', - 'cr', - 'ecpc', - 'keyword', - 'leads', - 'payment_sum', - 'payment_sum_approved', - 'payment_sum_declined', - 'payment_sum_open', - 'sales', - 'source', - ) - - FILTERING = { - 'date_start': Item.check_date, - 'date_end': Item.check_date, - 'website': int, - 'campaign': int, - 'source': ( - lambda x: x if x in StatisticBase.SOURCES else None), - } - - URL = Item.prepare_url('statistics/keywords') - - def get(self, **kwargs): - """ - res = client.StatisticKeywords.get() - res = client.StatisticKeywords.get(date_start='01.01.2013') - res = client.StatisticKeywords.get(limit=2) - - """ - return super(StatisticKeywords, self).get(self.URL, **kwargs) diff --git a/pyadmitad/items/websites.py b/pyadmitad/items/websites.py deleted file mode 100644 index f2ee1a7..0000000 --- a/pyadmitad/items/websites.py +++ /dev/null @@ -1,150 +0,0 @@ -from copy import deepcopy -from pyadmitad.items.base import Item - - -__all__ = ( - 'Websites', - 'WebsitesManage' -) - - -class Websites(Item): - """ - List of websites - - Required scope - "websites" - """ - URL = Item.prepare_url('websites') - SINGLE_URL = Item.prepare_url('websites/%(id)s') - - STATUS_FILTERING = ('new', 'pending', 'active', 'suspended', 'declined') - CAMPAIGN_STATUS_FILTERING = ('pending', 'active', 'declined', 'disabled') - FILTERING = { - 'status': lambda x: x if x in Websites.STATUS_FILTERING else None, - 'campaign_status': ( - lambda x: x if x in Websites.CAMPAIGN_STATUS_FILTERING else None), - } - - def get(self, **kwargs): - """ - res = client.Websites.get() - res = client.Websites.get(status='new', campaign_status='active') - - """ - kwargs['url'] = self.URL - kwargs['allowed_filtering'] = self.FILTERING - return self.transport.set_method('GET').set_pagination(**kwargs).\ - 
set_filtering(**kwargs).request(**kwargs) - - def getOne(self, _id, **kwargs): - """ - res = client.Websites.getOne(_id=2) - res = client.Websites.getOne2(2) - """ - kwargs['url'] = self.SINGLE_URL - kwargs['id'] = self.sanitize_id(_id) - return self.transport.set_method('GET').request(**kwargs) - - -class WebsitesManage(Item): - """ - Manage websites - - Required scope - "manage_websites" - """ - CREATE_URL = Item.prepare_url('website/create') - UPDATE_URL = Item.prepare_url('website/update/%(id)s') - VERIFY_URL = Item.prepare_url('website/verify/%(id)s') - DELETE_URL = Item.prepare_url('website/delete/%(id)s') - - CREATE_FIELDS = { - 'name': lambda x: Item.sanitize_string_value(x, 'name', max_length=200), - 'kind': lambda x: Item.sanitize_string_value(x, 'kind', max_length=20), - 'language': lambda x: Item.sanitize_string_value( - x, 'language', max_length=2), - 'adservice': lambda x: Item.sanitize_integer_value( - x, 'adservice', blank=True), - 'site_url': lambda x: Item.sanitize_string_value( - x, 'site_url', max_length=255), - 'description': lambda x: Item.sanitize_string_value( - x, 'description', max_length=20000, min_length=100), - 'categories': lambda x: Item.sanitize_integer_array(x, 'categories'), - 'regions': lambda x: Item.sanitize_string_array( - x, 'regions', max_length=2), - 'atnd_visits': lambda x: Item.sanitize_integer_value( - x, 'atnd_visits', blank=False), - 'atnd_hits': lambda x: Item.sanitize_integer_value( - x, 'atnd_hits', blank=False) - } - - UPDATE_FIELDS = { - 'name': lambda x: Item.sanitize_string_value( - x, 'name', max_length=200, blank=True), - 'language': lambda x: Item.sanitize_string_value( - x, 'language', max_length=2, blank=True), - 'adservice': lambda x: Item.sanitize_integer_value( - x, 'adservice', blank=True), - 'site_url': lambda x: Item.sanitize_string_value( - x, 'site_url', max_length=255, blank=True), - 'description': lambda x: Item.sanitize_string_value( - x, 'description', max_length=20000, min_length=100, 
blank=True), - 'categories': lambda x: Item.sanitize_integer_array( - x, 'categories', blank=True), - 'regions': lambda x: Item.sanitize_string_array( - x, 'regions', max_length=2, blank=True), - 'atnd_visits': lambda x: Item.sanitize_integer_value( - x, 'atnd_visits', blank=True), - 'atnd_hits': lambda x: Item.sanitize_integer_value( - x, 'atnd_hits', blank=True) - } - - @staticmethod - def sanitize_fields(fields, **kwargs): - data = deepcopy(kwargs) - for field in fields: - data[field] = fields[field](data.get(field)) - return dict([(key, value) for (key, value) in data.items() if value]) - - def create(self, **kwargs): - """ - res = client.WebsitesManage.create(name='test', ....) - - """ - data = self.sanitize_fields(self.CREATE_FIELDS, **kwargs) - kwargs['url'] = self.CREATE_URL - kwargs.pop('language', None) - return self.transport.set_method('POST').set_data(data).request(**kwargs) - - def update(self, _id, **kwargs): - """ - Here _id is a website id. - - res = client.WebsitesManage.update(22, name='test', ....) - - """ - data = self.sanitize_fields(self.UPDATE_FIELDS, **kwargs) - kwargs['url'] = self.UPDATE_URL - kwargs['id'] = self.sanitize_id(_id) - kwargs.pop('language', None) - return self.transport.set_method('POST').set_data(data).request(**kwargs) - - def verify(self, _id): - """ - Here _id is a website id. - - res = client.WebsitesManage.verify(40) - - """ - - data = {'url': self.VERIFY_URL, 'id': self.sanitize_id(_id)} - return self.transport.set_method('POST').request(**data) - - def delete(self, _id): - """ - Here _id is a website id. 
- - res = client.WebsitesManage.delete(40) - - """ - data = {'url': self.DELETE_URL, 'id': self.sanitize_id(_id)} - return self.transport.set_method('POST').request(**data) diff --git a/pyadmitad/tests/base.py b/pyadmitad/tests/base.py deleted file mode 100644 index 28de23d..0000000 --- a/pyadmitad/tests/base.py +++ /dev/null @@ -1,40 +0,0 @@ -# -*- coding: utf-8 -*- - -from mocker import MockerTestCase -from pyadmitad.api import get_oauth_client -from pyadmitad.transport import build_headers, \ - HttpTransportPagination, HttpTransportOrdering, HttpTransportFiltering - - -class BaseTestCase(MockerTestCase): - - def prepare_data(self, **kwargs): - with_pagination = kwargs.pop('with_pagination', True) - with_ordering = kwargs.pop('with_ordering', True) - with_filtering = kwargs.pop('with_filtering', True) - data = kwargs.get('data', {}) or {} - if with_pagination: - data.update(HttpTransportPagination(**kwargs).to_value()) - if with_ordering: - data.update(HttpTransportOrdering(**kwargs).to_value()) - if with_filtering: - data.update(HttpTransportFiltering(**kwargs).to_value()) - return data or None - - @staticmethod - def prepare_method(**kwargs): - method = kwargs.get('method', 'GET') - return method if method in ('POST', 'GET') else 'GET' - - def set_mocker(self, url, **kwargs): - access_token = 'access_token' - self.client = get_oauth_client(access_token) - obj = self.mocker.patch(self.client.transport) - url = url % kwargs - kwargs = { - 'data': self.prepare_data(**kwargs), - 'headers': build_headers(access_token), - 'method': BaseTestCase.prepare_method(**kwargs), - 'debug': False - } - obj.api_request(url, **kwargs) diff --git a/pyadmitad/tests/test_announcements.py b/pyadmitad/tests/test_announcements.py deleted file mode 100644 index e20f58a..0000000 --- a/pyadmitad/tests/test_announcements.py +++ /dev/null @@ -1,74 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Announcements, AnnouncementsManage -from pyadmitad.tests.base 
import BaseTestCase - - -ANNOUNCEMENTS_RESULTS = { - u'results': [ - { - u'message': u'Сотрудничество подтверждено', - u'id': 264, - u'advcampaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'event': u'request_accepted' - } - ], - u'_meta': { - u'count': 50, - u'limit': 1, - u'offset': 0 - } -} - -ANNOUNCEMENTS_DELETE_RESULTS = { - u'message': u'Оповещение удалено успешно.', - u'success': u'Deleted' -} - - -class AnnouncementsTestCase(BaseTestCase): - - def test_get_announcements_request(self): - self.set_mocker(Announcements.URL, limit=1) - result = ANNOUNCEMENTS_RESULTS - self.mocker.result(result) - self.mocker.replay() - res = self.client.Announcements.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_announcements_request_with_id(self): - self.set_mocker(Announcements.SINGLE_URL, id=264, with_pagination=False) - result = ANNOUNCEMENTS_RESULTS['results'][0] - self.mocker.result(result) - self.mocker.replay() - res = self.client.Announcements.getOne(264) - self.assertEqual(res[u'id'], 264) - self.mocker.verify() - - -class AnnouncementsManageTestCase(BaseTestCase): - - def test_delete_announcements_request(self): - self.set_mocker( - AnnouncementsManage.DELETE_URL, id=264, - with_pagination=False, method='POST') - result = ANNOUNCEMENTS_DELETE_RESULTS - self.mocker.result(result) - self.mocker.replay() - res = self.client.AnnouncementsManage.delete(264) - self.assertIn(u'message', res) - self.assertIn(u'success', res) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_auxiliary.py b/pyadmitad/tests/test_auxiliary.py deleted file mode 100644 index 8fd1830..0000000 --- a/pyadmitad/tests/test_auxiliary.py +++ /dev/null @@ -1,484 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from 
pyadmitad.tests.base import BaseTestCase -from pyadmitad.items.auxiliary import * - - -class WebsiteTypesTestCase(BaseTestCase): - - def test_get_website_types_request(self): - self.set_mocker(WebsiteTypes.URL) - result = { - u'results': [ - u'website', - u'doorway', - u'contextual', - u'social_app', - u'social_group', - u'social_teaser', - u'arbitrage' - ], - u'_meta': { - u'count': 7, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteTypes.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.mocker.verify() - - def test_get_website_types_request_with_pagination(self): - self.set_mocker(WebsiteTypes.URL, offset=1, limit=2) - result = { - u'results': [ - u'doorway', - u'contextual' - ], - u'_meta': { - u'count': 7, - u'limit': 2, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteTypes.get(offset=1, limit=2) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertEqual(len(res[u'results']), 2) - _meta = res[u'_meta'] - self.assertEqual(_meta[u'count'], 7) - self.assertEqual(_meta[u'limit'], 2) - self.assertEqual(_meta[u'offset'], 1) - self.mocker.verify() - - -class WebsiteRegionsTestCase(BaseTestCase): - - def test_get_website_regions_request(self): - self.set_mocker(WebsiteRegions.URL) - result = { - u'results': [ - u'RU', u'UA', u'BY', u'KZ', u'DE', u'FR', u'US', u'AM', u'AU', - u'AZ', u'CA', u'EE', u'GE', u'KG', u'LV', u'LT', u'MD', u'TJ', - u'TM', u'UZ' - ], - u'_meta': { - u'count': 20, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteRegions.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def 
test_get_website_regions_request_with_pagination(self): - self.set_mocker(WebsiteRegions.URL, offset=1, limit=2) - result = { - u'results': [u'UA', u'BY'], - u'_meta': { - u'count': 20, - u'limit': 2, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsiteRegions.get(offset=1, limit=2) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(len(res[u'results']), 2) - _meta = res[u'_meta'] - self.assertEqual(_meta[u'count'], 20) - self.assertEqual(_meta[u'limit'], 2) - self.assertEqual(_meta[u'offset'], 1) - self.mocker.verify() - - -class SystemLanguagesTestCase(BaseTestCase): - - def test_get_languages_request(self): - self.set_mocker(SystemLanguages.URL) - result = { - u'results': [ - { - u'flag': u'https://admitad.com/media/images/flags/' - u'c8ef33a926799c7c3d7103212a78b187.png', - u'language': u'Русский', - u'language_code': u'ru' - }, - { - u'flag': u'', - u'language': u'Deutsch', - u'language_code': u'de' - } - ], - u'_meta': { - u'count': 2, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemLanguages.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_language_request_with_code(self): - self.set_mocker(SystemLanguages.SINGLE_URL, - code='ru', with_pagination=False) - result = { - u'flag': u'https://admitad.trezor.by/media/images/flags/' - u'c8ef33a926799c7c3d7103212a78b187.png', - u'language': u'Русский', - u'language_code': u'ru' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemLanguages.getOne(code='ru') - self.assertIn(u'flag', res) - self.assertIn(u'language', res) - self.assertIn(u'language_code', res) - self.mocker.verify() - - -class 
SystemCurrenciesTestCase(BaseTestCase): - - def test_get_currencies_request(self): - self.set_mocker(SystemCurrencies.URL) - result = { - u'results': [ - { - u'code': u'EUR', - u'min_sum': u'20.00', - u'name': u'Евро', - u'sign': u'€' - }, - { - u'code': u'RUB', - u'min_sum': u'750.00', - u'name': u'Российский рубль', - u'sign': u'руб.' - }, - { - u'code': u'USD', - u'min_sum': u'25.00', - u'name': u'Американский доллар', - u'sign': u'$' - } - ], - u'_meta': { - u'count': 3, - u'limit': 20, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemCurrencies.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_currencies_request_with_pagination(self): - self.set_mocker(SystemCurrencies.URL, offset=1, limit=1) - result = { - u'results': [ - { - u'code': u'RUB', - u'min_sum': u'750.00', - u'name': u'Российский рубль', - u'sign': u'руб.' 
- } - ], - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.SystemCurrencies.get(offset=1, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'_meta'][u'offset'], 1) - self.mocker.verify() - - -class AdvertiserServiceTestCase(BaseTestCase): - - def test_get_advertiser_services(self): - self.set_mocker(AdvertiserServices.URL) - result = { - u'results': [ - { - u'allowed_referrers': u'', - u'id': 1, - u'logo': u'https://admitad.com/media/adservice/images/' - u'755c6ece4a7f2a45548737c212906434.png', - u'name': u'Yandex.Direct', - u'url': u'http://direct.yandex.ru/' - }, - { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - ], - u'_meta': { - u'count': 2, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_advertiser_services_with_pagination(self): - self.set_mocker(AdvertiserServices.URL, offset=1, limit=1) - result = { - u'results': [ - { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - ], - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 1 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.get(offset=1, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - 
self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'_meta'][u'offset'], 1) - self.mocker.verify() - - def test_get_advertiser_services_with_id(self): - self.set_mocker( - AdvertiserServices.SINGLE_URL, - **{'id': 2, 'with_pagination': False}) - result = { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.getOne(_id=2) - self.assertEqual(res[u'id'], 2) - self.mocker.verify() - - def test_get_advertiser_services_with_kind(self): - self.set_mocker(AdvertiserServices.KIND_URL, kind='contextual') - result = { - u'results': [ - { - u'allowed_referrers': u'', - u'id': 1, - u'logo': u'https://admitad.com/media/adservice/images/' - u'755c6ece4a7f2a45548737c212906434.png', - u'name': u'Yandex.Direct', - u'url': u'http://direct.yandex.ru/' - }, - { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': u'Бегун', - u'url': u'http://begun.ru/' - } - ], - u'_meta': { - u'count': 2, - u'limit': 20, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.getForKind(kind='contextual') - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_advertiser_services_with_kind_and_id(self): - self.set_mocker(AdvertiserServices.KIND_SINGLE_URL, - id=2, kind='contextual', with_pagination=False) - result = { - u'allowed_referrers': u'', - u'id': 2, - u'logo': u'https://admitad.com/media/adservice/images/' - u'273ad9483718164ffd05066a8bebec46.png', - u'name': 
u'Бегун', - u'url': u'http://begun.ru/' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.AdvertiserServices.getForKindOne(2, 'contextual') - self.assertEqual(res[u'id'], 2) - self.mocker.verify() - - -class CampaignsCategoriesTestCase(BaseTestCase): - - def test_get_campaigns_categories(self): - self.set_mocker(CampaignCategories.URL) - result = { - u'results': [ - { - u'id': 3, - u'name': u'Браузерные', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игр', - u'parent': None - } - }, - { - u'id': 5, - u'name': u'Другая', - u'parent': None - }, - { - u'id': 4, - u'name': u'Клиентские', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - } - ], - u'_meta': { - u'count': 3, - u'limit': 20, - u'offset': 0 - } - } - - self.mocker.result(result) - self.mocker.replay() - res = self.client.CampaignCategories.get() - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - def test_get_campaigns_categories_with_pagination(self): - self.set_mocker(CampaignCategories.URL, limit=3) - result = { - u'results': [ - { - u'id': 3, - u'name': u'Браузерные', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игр', - u'parent': None - } - }, - { - u'id': 5, - u'name': u'Другая', - u'parent': None - }, - { - u'id': 4, - u'name': u'Клиентские', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - } - ], - u'_meta': { - u'count': 3, - u'limit': 3, - u'offset': 0 - } - } - - self.mocker.result(result) - self.mocker.replay() - res = self.client.CampaignCategories.get(limit=3) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 3) - self.mocker.verify() - - def test_get_campaigns_categories_with_id(self): - self.set_mocker( - 
CampaignCategories.SINGLE_URL, id=3, with_pagination=False) - result = { - u'id': 3, - u'name': u'Браузерные', - u'parent': { - u'id': 2, - u'name': u'Онлайн-игр', - u'parent': None - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.CampaignCategories.getOne(3) - self.assertEqual(res[u'id'], 3) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_banners.py b/pyadmitad/tests/test_banners.py deleted file mode 100644 index d14516e..0000000 --- a/pyadmitad/tests/test_banners.py +++ /dev/null @@ -1,97 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Banners, BannersForWebsite -from pyadmitad.tests.base import BaseTestCase - - -class BannersTestCase(BaseTestCase): - - def test_get_banners_request(self): - self.set_mocker(Banners.URL, id=6, limit=1) - result = { - u'_meta': { - u'count': 5, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'banner_image': u'https://admitad.com/media/image.png', - u'creation_date': u'2013-01-18 20:13:27', - u'flashobj_url': u'', - u'id': 1, - u'image_url': u'', - u'is_flash': False, - u'name': u'Gmail Banner', - u'size_height': 39, - u'size_width': 94, - u'traffic_url': u'', - u'type': u'jpeg' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Banners.get(6, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - -class BannersForWebsiteTestCase(BaseTestCase): - - def test_get_banners_request(self): - self.set_mocker(BannersForWebsite.URL, id=6, w_id=22, limit=1) - result = { - u'_meta': { - u'count': 5, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'banner_image': u'https://admitad.com/media/image.png', - u'creation_date': u'2011-01-13 20:13:27', - u'direct_link': 
u'http://ad.admitad.com/goto/XXXXX/', - u'flashobj_url': u'', - u'html_code': { - u'async': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'flash': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'full': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'image': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - u'sync': u'see https://www.admitad.com/ru/doc/api/' - u'methods/banners/banners-website/', - }, - u'id': 1, - u'image_url': u'', - u'is_flash': False, - u'name': u'Gmail Banner', - u'size_height': 39, - u'size_width': 94, - u'traffic_url': u'', - u'type': u'jpeg' - } - ] - } - - self.mocker.result(result) - self.mocker.replay() - res = self.client.BannersForWebsite.get(6, 22, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_campaigns.py b/pyadmitad/tests/test_campaigns.py deleted file mode 100644 index b040209..0000000 --- a/pyadmitad/tests/test_campaigns.py +++ /dev/null @@ -1,311 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Campaigns, CampaignsForWebsite,\ - CampaignsManage -from pyadmitad.tests.base import BaseTestCase - - -CAMPAIGNS_RESULT = { - "results": [ - { - "status": "active", - "rating": "5.00", - "description": "Gmail is a mail service by google", - "actions": [ - { - "payment_size": "50.00", - "hold_time": 120, - "percentage": True, - "name": "action name", - "id": 1 - }, - { - "payment_size": "12.00", - "hold_time": 30, - "percentage": True, - "name": "Покупка", - "id": 15 - }, - { - "payment_size": "11.00", - "hold_time": 15, - "percentage": False, - "name": "Регистрация", - "id": 11 - } - ], - 
"site_url": "http://www.gmail.com/", - "regions": [ - { - "region": "01" - }, - { - "region": "BY" - }, - { - "region": "CA" - }, - { - "region": "DE" - }, - { - "region": "KZ" - }, - { - "region": "RU" - }, - { - "region": "US" - } - ], - "currency": "USD", - "cr": None, - "ecpc": None, - "id": 6, - "categories": [ - { - "name": "Магазин", - "parent": None, - "id": 1 - }, - { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - { - "name": "Браузерные", - "parent": { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - "id": 3 - } - ], - "name": "Campaign2" - } - ], - "_meta": { - "count": 4, - "limit": 1, - "offset": 0 - } -} - -CAMPAIGNS_FOR_WEBSITE_RESULT = { - "results": [ - { - "status": "active", - "rating": "5.00", - "traffics": [ - { - "enabled": False, - "name": "Тип 1", - "id": 1 - }, - { - "enabled": False, - "name": "Тип 2", - "id": 2 - } - ], - "ecpc": None, - "description": "Gmail is a mail service by google", - "name": "AdvCamp 1", - "gotolink": "http://ad.admitad.com/goto/some_link/", - "avg_hold_time": None, - "actions": [ - { - "payment_size": "50.00", - "hold_time": 120, - "percentage": None, - "name": "action name", - "id": 1 - }, - { - "payment_size": "12.00", - "hold_time": 30, - "percentage": True, - "name": "Покупка", - "id": 15 - }, - { - "payment_size": "11.00", - "hold_time": 15, - "percentage": True, - "name": "Регистрация", - "id": 11 - } - ], - "site_url": "http://www.gmail.com/", - "regions": [ - { - "region": "01" - }, - { - "region": "BY" - }, - { - "region": "CA" - }, - { - "region": "DE" - }, - { - "region": "KZ" - }, - { - "region": "RU" - }, - { - "region": "US" - } - ], - "currency": "USD", - "goto_cookie_lifetime": 45, - "geotargeting": True, - "cr": None, - "activation_date": "2010-03-31 19:05:39", - "max_hold_time": 120, - "id": 6, - "categories": [ - { - "name": "Магазин", - "parent": None, - "id": 1 - }, - { - "name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - { - "name": "Браузерные", - "parent": { - 
"name": "Онлайн-игры", - "parent": None, - "id": 2 - }, - "id": 3 - }, - { - "name": "Другая", - "parent": None, - "id": 5 - }, - { - "name": "Финансы", - "parent": { - "name": "Другая", - "parent": None, - "id": 5 - }, - "id": 6 - }, - { - "name": "Подкатегория", - "parent": { - "name": "Другая", - "parent": None, - "id": 5 - }, - "id": 17 - } - ], - "percentage_of_confirmed": None - } - ], - "_meta": { - "count": 4, - "limit": 1, - "offset": 0 - } -} - -CAMPAIGN_CONNECT_RESULT = { - "message": "Заявка на добавление кампании Campaign успешно создана.", - "success": "OK" -} - -CAMPAIGN_DISCONNECT_RESULT = { - "message": "Кампания Campaign была удалена из ваших предложений." - " Вы можете позже добавить ее снова.", - "success": "Deleted" -} - - -class CampaignsTestCase(BaseTestCase): - - def test_get_campaigns_request(self): - self.set_mocker(Campaigns.URL, limit=1) - self.mocker.result(CAMPAIGNS_RESULT) - self.mocker.replay() - res = self.client.Campaigns.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_campaigns_request_with_id(self): - self.set_mocker(Campaigns.SINGLE_URL, id=6, with_pagination=False) - self.mocker.result(CAMPAIGNS_RESULT['results'][0]) - self.mocker.replay() - res = self.client.Campaigns.getOne(6) - self.assertEqual(res[u'id'], 6) - self.mocker.verify() - - -class CampaignsForWebsiteTestCase(BaseTestCase): - - def test_get_campaigns_for_websites_request(self): - self.set_mocker(CampaignsForWebsite.URL, id=22, limit=1) - self.mocker.result(CAMPAIGNS_FOR_WEBSITE_RESULT) - self.mocker.replay() - res = self.client.CampaignsForWebsite.get(22, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 
1) - self.mocker.verify() - - def test_get_campaigns_request_with_id(self): - self.set_mocker( - CampaignsForWebsite.SINGLE_URL, id=22, - c_id=6, with_pagination=False) - self.mocker.result(CAMPAIGNS_FOR_WEBSITE_RESULT['results'][0]) - self.mocker.replay() - res = self.client.CampaignsForWebsite.getOne(22, 6) - self.assertEqual(res[u'id'], 6) - self.mocker.verify() - - -class CampaignsConnectWebsiteTestCase(BaseTestCase): - - def test_campaign_connect_websites_request(self): - self.set_mocker(CampaignsManage.CONNECT_URL, w_id=22, - c_id=6, with_pagination=False, method='POST') - self.mocker.result(CAMPAIGN_CONNECT_RESULT) - self.mocker.replay() - res = self.client.CampaignsManage.connect(c_id=6, w_id=22) - self.assertIn(u'message', res) - self.assertIn(u'success', res) - self.mocker.verify() - - def test_campaign_disconnect_websites_request(self): - self.set_mocker(CampaignsManage.DISCONNECT_URL, w_id=22, - c_id=6, with_pagination=False, method='POST') - self.mocker.result(CAMPAIGN_CONNECT_RESULT) - self.mocker.replay() - res = self.client.CampaignsManage.disconnect(c_id=6, w_id=22) - self.assertIn(u'message', res) - self.assertIn(u'success', res) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_coupons.py b/pyadmitad/tests/test_coupons.py deleted file mode 100644 index d9caaf4..0000000 --- a/pyadmitad/tests/test_coupons.py +++ /dev/null @@ -1,222 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Coupons, CouponsForWebsite -from pyadmitad.tests.base import BaseTestCase - - -class CouponsTestCase(BaseTestCase): - - def test_get_coupons_request(self): - self.set_mocker(Coupons.URL, limit=1) - result = { - u'results': [ - { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': 
u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - ], - u'_meta': { - u'count': 6, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Coupons.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_coupons_request_with_id(self): - self.set_mocker(Coupons.SINGLE_URL, id=1, with_pagination=False) - result = { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Coupons.getOne(1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() - - -class CouponsForWebsiteTestCase(BaseTestCase): - - def test_get_coupons_for_website_request(self): - self.set_mocker(CouponsForWebsite.URL, id=3, limit=1) - result = { - u'results': [ - { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, 
- { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - ], - u'_meta': { - u'count': 6, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.CouponsForWebsite.get(_id=3, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_coupons_for_website_request_with_id(self): - self.set_mocker( - CouponsForWebsite.SINGLE_URL, id=3, c_id=1, with_pagination=False) - result = { - u'campaign': { - u'id': 8, - u'name': u'AdvCamp 3' - }, - u'categories': [ - { - u'id': 1, - u'name': u'Детские товары' - }, - { - u'id': 3, - u'name': u'Мода & аксессуары' - }, - { - u'id': 4, - u'name': u'Обувь женская & мужская' - } - ], - u'date_end': u'2013-05-10 23:59:59', - u'date_start': u'2011-11-02 00:00:00', - u'description': u'', - u'exclusive': False, - u'id': 1, - u'image': u'https://admitad.com/media/path_img.png', - u'name': u'Купон', - u'rating': u'0.00', - u'short_name': u'coupon', - u'species': u'promocode', - u'status': u'active', - u'types': [ - { - u'id': 1, - u'name': u'Бесплатная доставка' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.CouponsForWebsite.getOne(3, 1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_me.py b/pyadmitad/tests/test_me.py deleted 
file mode 100644 index 4469e09..0000000 --- a/pyadmitad/tests/test_me.py +++ /dev/null @@ -1,55 +0,0 @@ -import unittest -from pyadmitad.items import * -from pyadmitad.tests.base import BaseTestCase - - -class MeTestCase(BaseTestCase): - - def test_me_request(self): - self.set_mocker(Me.URL, with_pagination=False) - result = { - 'username': 'username', - 'first_name': 'first_name', - 'last_name': 'last_name', - 'id': 1, - 'language': 'ru' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Me.get() - self.assertEqual(res['username'], 'username') - self.assertEqual(res['first_name'], 'first_name') - self.assertEqual(res['id'], 1) - self.assertEqual(res['language'], 'ru') - self.mocker.verify() - - -class BalanceTestCase(BaseTestCase): - - def test_balance_request(self): - self.set_mocker(Balance.URL, with_pagination=False) - result = [ - { - 'currency': 'USD', - 'balance': '20000.00' - }, - { - 'currency': 'EUR', - 'balance': '0.00' - }, - { - 'currency': 'RUB', - 'balance': '0.00' - } - ] - self.mocker.result(result) - self.mocker.replay() - res = self.client.Balance.get() - self.assertEqual(len(res), 3) - self.assertIn('balance', res[0]) - self.assertIn('currency', res[0]) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_money_transfer.py b/pyadmitad/tests/test_money_transfer.py deleted file mode 100644 index 127ec09..0000000 --- a/pyadmitad/tests/test_money_transfer.py +++ /dev/null @@ -1,112 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import MoneyTransfers, MoneyTransfersManage -from pyadmitad.tests.base import BaseTestCase - - -MONEY_TRANSFER_CREATE_DATA = dict( - currency='USD', - comment="test", - recipient="admitadppvweb", - sum='200.12', -) - - -class MoneyTransfersTestCase(BaseTestCase): - - def test_get_money_transfers_request(self): - self.set_mocker(MoneyTransfers.URL, limit=1) - result = { - "_meta": { - "count": 6, - "limit": 1, - "offset": 
0 - }, - "results": [ - { - "comment": "test", - "sender": { - "username": "webmaster1", - "id": 96 - }, - "sum": 200.0, - "currency": "USD", - "date_created": "2013-12-06T12:28:29", - "recipient": { - "username": "admitadppvweb", - "id": 100 - }, - "id": 8 - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.MoneyTransfers.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'results'][0][u'currency'], u'USD') - self.mocker.verify() - - def test_get_money_transfers_request_with_id(self): - self.set_mocker(MoneyTransfers.SINGLE_URL, id=8, with_pagination=False) - result = { - "comment": "test", - "sender": { - "username": "webmaster1", - "id": 96 - }, - "sum": 200.0, - "currency": "USD", - "date_created": "2013-12-06T12:28:29", - "recipient": { - "username": "admitadppvweb", - "id": 100 - }, - "id": 8 - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.MoneyTransfers.getOne(8) - self.assertEqual(res[u'id'], 8) - self.mocker.verify() - - -class MoneyTransfersManageTestCase(BaseTestCase): - - def test_create_payments_request(self): - self.set_mocker(MoneyTransfersManage.CREATE_URL, - method='POST', - with_pagination=False, - data=MONEY_TRANSFER_CREATE_DATA) - result = { - "comment": "test", - "sender": { - "username": "webmaster1", - "id": 96 - }, - "sum": 200.12, - "currency": "USD", - "date_created": "2013-12-06T12:28:29", - "recipient": { - "username": "admitadppvweb", - "id": 100 - }, - "id": 9 - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.MoneyTransfersManage.create( - **MONEY_TRANSFER_CREATE_DATA) - self.assertEqual(res[u'comment'], u'test') - self.assertEqual(res[u'currency'], u'USD') - self.assertEqual(res[u'sum'], 200.12) - self.assertEqual(res[u'sender'][u'username'], u'webmaster1') - 
self.mocker.verify() - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_payments.py b/pyadmitad/tests/test_payments.py deleted file mode 100644 index 62c406e..0000000 --- a/pyadmitad/tests/test_payments.py +++ /dev/null @@ -1,116 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Payments, PaymentsManage -from pyadmitad.tests.base import BaseTestCase - - -class PaymentsTestCase(BaseTestCase): - - def test_get_payments_request(self): - self.set_mocker(Payments.URL, limit=1) - result = { - u'_meta': { - u'count': 6, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'comment': u'', - u'currency': u'USD', - u'datetime': u'2012-05-27 19:45:07', - u'id': 68, - u'payment_sum': u'2000.00', - u'status': u'pending', - u'withdrawal_type': u'webmoney' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Payments.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(res[u'results'][0][u'currency'], u'USD') - self.mocker.verify() - - def test_get_payments_request_with_id(self): - self.set_mocker(Payments.SINGLE_URL, id=68, with_pagination=False) - result = { - u'comment': u'', - u'currency': u'USD', - u'datetime': u'2012-05-27 19:45:07', - u'id': 68, - u'payment_sum': u'2000.00', - u'status': u'pending', - u'withdrawal_type': u'webmoney' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Payments.getOne(68) - self.assertEqual(res[u'id'], 68) - self.mocker.verify() - - -class PaymentsManageTestCase(BaseTestCase): - - def test_create_payments_request(self): - self.set_mocker(PaymentsManage.CREATE_URL, - method='POST', with_pagination=False, code='EUR') - result = { - u'comment': u'', - u'currency': u'EUR', - u'datetime': u'2013-04-24 15:07:47', - u'id': 71, - u'payment_sum': 
u'10000', - u'status': u'draft', - u'withdrawal_type': u'' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.PaymentsManage.create('EUR') - self.assertEqual(res[u'status'], u'draft') - self.assertEqual(res[u'currency'], u'EUR') - self.assertEqual(res[u'id'], 71) - self.mocker.verify() - - def test_confirm_payments_request(self): - self.set_mocker(PaymentsManage.CONFIRM_URL, - method='POST', with_pagination=False, id=71) - result = { - u'comment': u'', - u'currency': u'EUR', - u'datetime': u'2013-04-24 15:07:47', - u'id': 71, - u'payment_sum': u'10000.00', - u'status': u'pending', - u'withdrawal_type': u'webmoney' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.PaymentsManage.confirm(71) - self.assertEqual(res[u'status'], u'pending') - self.assertEqual(res[u'currency'], u'EUR') - self.assertEqual(res[u'id'], 71) - self.mocker.verify() - - def test_delete_payments_request(self): - self.set_mocker(PaymentsManage.DELETE_URL, - method='POST', with_pagination=False, id=71) - result = { - u'message': u'Заявка удалена успешно.', - u'success': u'Deleted' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.PaymentsManage.delete(71) - self.assertIn('success', res) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_products.py b/pyadmitad/tests/test_products.py deleted file mode 100644 index f5b61ef..0000000 --- a/pyadmitad/tests/test_products.py +++ /dev/null @@ -1,226 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import ProductVendors, ProductCategories,\ - ProductCampaigns, Products -from pyadmitad.tests.base import BaseTestCase - - -PRODUCTS_RESULT = { - u'_meta': { - u'count': 156, - u'limit': 1, - u'offset': 1 - }, - u'results': [ - { - u'advcampaign': { - u'id': 6, - u'name': u'AdvCamp 1' - }, - u'available': True, - u'category': { - u'id': 3, - u'name': u'category-child1' - }, - u'currency': u'RUB', - 
u'description': None, - u'id': 2, - u'model': u'JAISALMER', - u'name': u'Свеча ароматическая Comme des Garcons', - u'param': { - u'Пол': u'Уни', - u'Размер': u'145 гр.' - }, - u'picture': u'http://cdn.admitad.com/some_file.jpg', - u'picture_orig': u'http://content.some/path/file.jpg', - u'price': 3900.0, - u'thumbnail': u'http://cdn.admitad.com/some_file.jpg', - u'typePrefix': u'Свеча ароматическая', - u'updated': u'2012-08-30 21:35:26', - u'url': u'http://ad.admitad.com/goto/' - u'195b832b828cb0fd8d17234642e5a7/?ulp=' - u'[[[http://www.boutique.ru/jewelleryandgifts/' - u'svechy_in_gifts/commedesgarcons/' - u'e9aeb173-a43a-11dd-892e-00304833051e]]]', - u'vendor': { - u'id': 1, - u'name': u'Comme des Garcons' - } - } - ] -} - - -class ProductVendorsTestCase(BaseTestCase): - - def test_get_product_vendors_request(self): - self.set_mocker(ProductVendors.URL, limit=1) - result = { - u'_meta': { - u'count': 752, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'id': 1, - u'name': u'Comme des Garcons' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductVendors.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_product_vendors_with_id_request(self): - self.set_mocker(ProductVendors.SINGLE_URL, id=1, with_pagination=False) - result = { - u'id': 1, - u'name': u'Comme des Garcons' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductVendors.getOne(1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() - - -class ProductCategoriesTestCase(BaseTestCase): - - def test_get_product_categories_request(self): - self.set_mocker(ProductCategories.URL, limit=4) - result = { - u'_meta': { - u'count': 4, - u'limit': 4, - u'offset': 0 - }, - u'results': [ - { - u'id': 1, - u'name': u'category1' - }, - { - 
u'id': 2, - u'name': u'category2' - }, - { - u'id': 3, - u'name': u'category-child1', - u'parent': { - u'id': 1, - u'name': u'category1', - u'parent': None - } - }, - { - u'id': 4, - u'name': u'category4' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCategories.get(limit=4) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 4) - self.assertEqual(len(res[u'results']), 4) - self.mocker.verify() - - def test_get_product_categories_with_id_request(self): - self.set_mocker( - ProductCategories.SINGLE_URL, id=1, with_pagination=False) - result = { - u'id': 1, - u'name': u'category1' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCategories.getOne(1) - self.assertEqual(res[u'id'], 1) - self.mocker.verify() - - -class ProductCampaignsTestCase(BaseTestCase): - - def test_get_product_campaigns_request(self): - self.set_mocker(ProductCampaigns.URL, id=25, limit=1) - result = { - u'results': [ - { - u'count': 189, - u'id': 6, - u'name': u'AdvCamp 1' - } - ], - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 0 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCampaigns.get(25, limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(len(res[u'results']), 1) - self.mocker.verify() - - def test_get_product_campaigns_with_id_request(self): - self.set_mocker( - ProductCampaigns.SINGLE_URL, id=25, c_id=6, with_pagination=False) - result = { - u'count': 189, - u'id': 6, - u'name': u'AdvCamp 1' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.ProductCampaigns.getOne(25, 6) - self.assertEqual(res[u'id'], 6) - 
self.mocker.verify() - - -class ProductsTestCase(BaseTestCase): - - def test_get_products_request(self): - self.set_mocker(Products.URL, id=25, limit=1, offset=1) - result = PRODUCTS_RESULT - self.mocker.result(result) - self.mocker.replay() - res = self.client.Products.get(25, limit=1, offset=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.assertEqual(len(res[u'results']), 1) - self.mocker.verify() - - def test_get_products_with_id_request(self): - self.set_mocker( - Products.SINGLE_URL, id=25, p_id=2, with_pagination=False) - result = PRODUCTS_RESULT['results'][0] - self.mocker.result(result) - self.mocker.replay() - res = self.client.Products.getOne(25, 2) - self.assertEqual(res[u'id'], 2) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_referrals.py b/pyadmitad/tests/test_referrals.py deleted file mode 100644 index b76a23b..0000000 --- a/pyadmitad/tests/test_referrals.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Referrals -from pyadmitad.tests.base import BaseTestCase - - -class ReferralsTestCase(BaseTestCase): - - def test_get_referrals_request(self): - self.set_mocker(Referrals.URL, limit=1) - result = { - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'id': 8, - u'payment': None, - u'username': u'username' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Referrals.get(limit=1) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_referrals_request_with_id(self): - self.set_mocker(Referrals.SINGLE_URL, id=8, 
with_pagination=False) - result = { - u'id': 8, - u'payment': None, - u'username': u'username'} - self.mocker.result(result) - self.mocker.replay() - res = self.client.Referrals.getOne(8) - self.assertEqual(res[u'id'], 8) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_statistics.py b/pyadmitad/tests/test_statistics.py deleted file mode 100644 index c7713e7..0000000 --- a/pyadmitad/tests/test_statistics.py +++ /dev/null @@ -1,407 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import StatisticWebsites, StatisticCampaigns,\ - StatisticDays, StatisticMonths, StatisticActions, StatisticSubIds,\ - StatisticSources, StatisticKeywords -from pyadmitad.tests.base import BaseTestCase - - -class StatisticsWebsitesTestCase(BaseTestCase): - - def test_get_statistics_websites_request(self): - self.set_mocker( - StatisticWebsites.URL, - website=22, - allowed_filtering=StatisticWebsites.FILTERING, - allowed_ordering=StatisticWebsites.ORDERING - ) - result = { - u'results': [ - { - u'clicks': 184, - u'cr': 0.3, - u'ctr': 0.03, - u'currency': u'RUB', - u'ecpc': 124.77, - u'ecpm': 4403.26, - u'leads_sum': 61, - u'payment_sum_approved': 1870.67, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 21087.97, - u'sales_sum': 10, - u'views': 5214, - u'website_id': 22, - u'website_name': u'website' - } - ], - u'_meta': { - u'count': 1, - u'limit': 20, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticWebsites.get(website=22) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'results'][0][u'website_id'], 22) - self.mocker.verify() - - -class StatisticsCampaignTestCase(BaseTestCase): - - def test_get_statistics_campaign_request(self): - self.set_mocker( - StatisticCampaigns.URL, - campaign=9, - 
allowed_filtering=StatisticCampaigns.FILTERING, - allowed_ordering=StatisticCampaigns.ORDERING - ) - result = { - u'results': [ - { - u'advcampaign_id': 9, - u'advcampaign_name': u'Campaign', - u'clicks': 35, - u'cr': 0.1143, - u'ctr': 0.4487, - u'currency': u'RUB', - u'ecpc': 5.714286, - u'ecpm': 2564.102564, - u'leads_sum': 4, - u'payment_sum_approved': 0.0, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 200.0, - u'sales_sum': 0, - u'views': 78 - }, - ], - u'_meta': { - u'count': 1, - u'limit': 20, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticCampaigns.get(campaign=9) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'results'][0][u'advcampaign_id'], 9) - self.mocker.verify() - - -class StatisticsDaysTestCase(BaseTestCase): - - def test_get_statistics_days_request(self): - self.set_mocker( - StatisticDays.URL, - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticDays.FILTERING, - allowed_ordering=StatisticDays.ORDERING - ) - result = { - u'results': [ - { - u'clicks': 3, - u'cr': 0.3333, - u'ctr': 0.0, - u'currency': u'RUB', - u'date': u'2013-01-12', - u'ecpc': 27.88, - u'ecpm': 0.0, - u'leads_sum': 1, - u'payment_sum_approved': 83.65, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 0.0, - u'sales_sum': 0, - u'views': 0 - } - ], - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticDays.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsMonthsTestCase(BaseTestCase): - - def 
test_get_statistics_months_request(self): - self.set_mocker( - StatisticMonths.URL, - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticMonths.FILTERING, - allowed_ordering=StatisticMonths.ORDERING - ) - result = { - u'results': [ - { - u'clicks': 3, - u'cr': 0.3333, - u'ctr': 0.0, - u'currency': u'RUB', - u'date': u'2013-01-12', - u'ecpc': 27.88, - u'ecpm': 0.0, - u'leads_sum': 1, - u'payment_sum_approved': 83.65, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 0.0, - u'sales_sum': 0, - u'views': 0 - } - ], - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 0 - }, - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticMonths.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsActionsTestCase(BaseTestCase): - - def test_get_statistics_actions_request(self): - self.set_mocker( - StatisticActions.URL, - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticActions.FILTERING, - allowed_ordering=StatisticActions.ORDERING - ) - result = { - u'results': [ - { - u'action': u'action name', - u'action_date': u'2013-01-15 18:23:54', - u'action_id': 281, - u'advcampaign_id': 9, - u'advcampaign_name': u'Campaign', - u'cart': 777.0, - u'click_date': u'2011-01-13 18:23:50', - u'closing_date': u'2012-04-02', - u'status_updated': u'2011-09-16 23:13:35', - u'comment': None, - u'conversion_time': 4, - u'currency': u'RUB', - u'keyword': None, - u'payment': 50.0, - u'status': u'pending', - u'subid': None, - u'subid1': None, - u'subid2': None, - u'subid3': None, - u'subid4': None, - u'website_name': u'site1_of_webmaster1' - } - ], - u'_meta': { - u'count': 89, - u'limit': 1, - u'offset': 0 - }, - } - 
self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticActions.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsSubIdsTestCase(BaseTestCase): - - def test_get_statistics_sub_ids_request(self): - self.set_mocker( - StatisticSubIds.URL % '', - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticSubIds.FILTERING, - allowed_ordering=StatisticSubIds.ORDERING - ) - result = { - u'_meta': { - u'count': 1, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'clicks': 1, - u'cr': 89.0, - u'currency': u'RUB', - u'ecpc': 5202.5, - u'leads_sum': 89, - u'payment_sum_approved': 5002.5, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 200.0, - u'sales_sum': 0, - u'subid': u'sub' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticSubIds.get( - campaign=9, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsSourcesTestCase(BaseTestCase): - - def test_get_statistics_sources_request(self): - self.set_mocker( - StatisticSources.URL, - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticSources.FILTERING, - allowed_ordering=StatisticSources.ORDERING - ) - result = { - u'_meta': { - u'count': 2, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'clicks': 7, - u'cr': 0.1429, - u'currency': u'RUB', - u'ecpc': 51.785714, - u'leads_sum': 1, - u'payment_sum_approved': 0.0, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 362.5, - u'sales_sum': 0, - 
u'source': u'g', - u'source_name': u'Google Adwords' - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticSources.get( - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -class StatisticsKeywordsTestCase(BaseTestCase): - - def test_get_statistics_keywords_request(self): - self.set_mocker( - StatisticKeywords.URL, - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1, - allowed_filtering=StatisticKeywords.FILTERING, - allowed_ordering=StatisticKeywords.ORDERING - ) - result = { - u'_meta': { - u'count': 3, - u'limit': 1, - u'offset': 0 - }, - u'results': [ - { - u'clicks': 2, - u'cr': 0.0, - u'currency': u'RUB', - u'ecpc': 0.0, - u'keyword': u'keyword', - u'leads_sum': 0, - u'payment_sum_approved': 0.0, - u'payment_sum_declined': 0.0, - u'payment_sum_open': 0.0, - u'sales_sum': 0 - } - ] - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.StatisticKeywords.get( - campaign=6, - date_start='01.01.2013', - date_end='01.31.2013', - limit=1 - ) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.mocker.verify() - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/tests/test_websites.py b/pyadmitad/tests/test_websites.py deleted file mode 100644 index 4480bed..0000000 --- a/pyadmitad/tests/test_websites.py +++ /dev/null @@ -1,240 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -from pyadmitad.items import Websites, WebsitesManage -from pyadmitad.tests.base import BaseTestCase - - -WEBSITE_CREATE_DATA = dict( - regions=['RU'], - atnd_hits='20', - atnd_visits='10', - name='website1', - language='ru', - site_url='http://google.com', - 
description='descriptiondescriptiondescriptiondescription' - 'descriptiondescriptiondescriptiondescription' - 'descriptiondescription', - categories=['1', '2'], - kind='website' -) - - -class WebsitesTestCase(BaseTestCase): - - def test_get_websites_request(self): - self.set_mocker(Websites.URL, limit=1, offset=2) - result = { - u'results': [ - { - u'status': u'active', - u'kind': u'website', - u'is_old': True, - u'name': u'site', - u'language': 'ru', - u'description': u'site', - u'verification_code': u'59505879f5', - u'creation_date': u'2010-03-31 18:25:19', - u'regions': [ - { - u'region': u'RU', - u'id': 5 - } - ], - u'atnd_visits': 100, - u'adservice': None, - u'site_url': u'http://www.mail.ru/', - u'id': 22, - u'categories': [ - { - u'name': u'Категория', - u'parent': None, - u'id': 5 - } - ], - u'atnd_hits': 0 - } - ], - u'_meta': { - u'count': 4, - u'limit': 1, - u'offset': 2 - } - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Websites.get(limit=1, offset=2) - self.assertIn(u'results', res) - self.assertIn(u'_meta', res) - self.assertIsInstance(res[u'results'], list) - self.assertIsInstance(res[u'_meta'], dict) - self.assertEqual(res[u'_meta'][u'limit'], 1) - self.mocker.verify() - - def test_get_websites_request_with_id(self): - self.set_mocker(Websites.SINGLE_URL, id=22, with_pagination=False) - result = { - u'status': u'active', - u'kind': u'website', - u'is_old': True, - u'name': u'site', - u'language': 'ru', - u'description': u'site', - u'verification_code': u'59505879f5', - u'creation_date': u'2010-03-31 18:25:19', - u'regions': [ - { - u'region': u'RU', - u'id': 5 - } - ], - u'atnd_visits': 100, - u'adservice': None, - u'site_url': u'http://www.mail.ru/', - u'id': 22, - u'categories': [ - { - u'name': u'Категория', - u'parent': None, - u'id': 5 - } - ], - u'atnd_hits': 0 - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.Websites.getOne(22) - self.assertEqual(res[u'id'], 22) - self.mocker.verify() - 
- -class WebsitesManageTestCase(BaseTestCase): - - def test_create_website_request(self): - self.set_mocker( - WebsitesManage.CREATE_URL, - method='POST', - with_pagination=False, - data=WEBSITE_CREATE_DATA) - result = { - u'atnd_hits': 20, - u'atnd_visits': 10, - u'categories': [ - { - u'id': 1, - u'name': u'Магазин', - u'parent': None - }, - { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - ], - u'creation_date': u'2013-04-22 14:41:29', - u'description': u'descriptiondescriptiondescriptiondescription' - u'descriptiondescriptiondescriptiondescription' - u'descriptiondescription', - u'id': 52, - u'is_old': False, - u'kind': u'website', - u'language': u'ru', - u'name': u'website1', - u'regions': [ - { - u'id': 25, - u'region': u'RU' - } - ], - u'site_url': u'http://google.com/', - u'status': u'new', - u'verification_code': u'fde88f4b6b' - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.create(**WEBSITE_CREATE_DATA) - self.assertIn(u'id', res) - self.assertEqual(u'new', res['status']) - self.assertEqual(u'website', res['kind']) - self.mocker.verify() - - def test_update_website_request(self): - self.set_mocker( - WebsitesManage.UPDATE_URL, - id=52, - method='POST', - with_pagination=False, - data={'language': 'de', 'name': 'test-update'}) - result = { - u'atnd_hits': 20, - u'atnd_visits': 10, - u'categories': [ - { - u'id': 1, - u'name': u'Магазин', - u'parent': None - }, - { - u'id': 2, - u'name': u'Онлайн-игры', - u'parent': None - } - ], - u'creation_date': u'2013-04-22 14:41:29', - u'description': u'descriptiondescriptiondescriptiondescription' - u'descriptiondescriptiondescriptiondescription' - u'descriptiondescription', - u'id': 52, - u'is_old': False, - u'kind': u'website', - u'language': u'de', - u'name': u'test-update', - u'regions': [ - { - u'id': 25, - u'region': u'RU' - } - ], - u'site_url': u'http://google.com/', - u'status': u'new', - u'verification_code': u'fde88f4b6b' - } - 
self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.update( - 52, language='de', name='test-update') - self.assertIn(u'id', res) - self.assertEqual(u'test-update', res['name']) - self.assertEqual(u'de', res['language']) - self.mocker.verify() - - def test_verify_website_request(self): - self.set_mocker( - WebsitesManage.VERIFY_URL, - id=52, method='POST', with_pagination=False) - result = { - "message": "Площадка прошла автоматическую проверку." - " Ожидайте подтверждения администрацией.", - "success": "Accepted" - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.verify(52) - self.assertIn(u'success', res) - - def test_delete_website_request(self): - self.set_mocker( - WebsitesManage.DELETE_URL, - id=52, method='POST', with_pagination=False) - result = { - "message": "Площадка удалена успешно.", - "success": "Deleted" - } - self.mocker.result(result) - self.mocker.replay() - res = self.client.WebsitesManage.delete(52) - self.assertIn(u'success', res) - - -if __name__ == '__main__': - unittest.main() diff --git a/pyadmitad/transport.py b/pyadmitad/transport.py deleted file mode 100644 index 42694b1..0000000 --- a/pyadmitad/transport.py +++ /dev/null @@ -1,456 +0,0 @@ -import requests -from base64 import b64encode -import json -import urllib -try: - import urlparse -except ImportError: - import urllib.parse -import uuid -import logging -from pyadmitad.constants import * -from pyadmitad.exceptions import * - - -LOG = logging.getLogger(__file__) -LOG.addHandler(logging.StreamHandler()) - - -def to_json(content): - try: - return json.loads(content) - except (TypeError, ValueError): - return content - - -def debug_log(value, debug=True): - if debug: - LOG.setLevel(logging.DEBUG) - LOG.debug(value) - else: - LOG.setLevel(logging.NOTSET) - - -def prepare_request_data( - data=None, headers=None, method='GET', - timeout=None, ssl_verify=False): - if headers is None: - headers = {} - kwargs = {} - if 
timeout is None: - timeout = DEFAULT_REQUEST_TIMEOUT - kwargs['timeout'] = timeout - if method == 'POST': - kwargs['data'] = data - if method == 'GET': - kwargs['params'] = data - kwargs['headers'] = headers - kwargs['allow_redirects'] = True - kwargs['verify'] = ssl_verify - return kwargs - - -def api_request( - url, data=None, headers=None, method='GET', - timeout=None, ssl_verify=False, debug=False): - kwargs = prepare_request_data( - data=data, headers=headers, method=method, - timeout=timeout, ssl_verify=ssl_verify) - status_code = 500 - content = u'' - try: - response = requests.request(method, url, **kwargs) - debug_log(u'Request url: %s' % response.url, debug) - if method == 'POST': - debug_log(u'Request body: %s' % response.request.body, debug) - status_code = response.status_code - content = response.content - if status_code >= 400: - response.raise_for_status() - return response.json() - except requests.HTTPError as err: - raise HttpException(status_code, to_json(content), err) - except requests.RequestException as err: - raise ConnectionException(err) - except (ValueError, TypeError) as err: - raise JsonException(err) - - -def get_credentials(client_id, client_secret): - return b64encode( - ("%s:%s" % (client_id, client_secret)).encode('utf-8') - ).decode('utf-8') - - -def api_post_request(url, **kwargs): - kwargs['method'] = "POST" - return api_request(url, **kwargs) - - -def api_get_request(url, **kwargs): - kwargs['method'] = "GET" - return api_request(url, **kwargs) - - -def build_authorization_headers(access_token): - return {'Authorization': "Bearer %s" % access_token} - - -def build_headers(access_token, user_agent=None, language=None): - headers = build_authorization_headers(access_token) - headers['Connection'] = 'Keep-Alive' - if user_agent: - headers['User-Agent'] = user_agent - if language: - headers['Content-Language'] = language - return headers - - -def oauth_password_authorization(data): - """ - OAuth2 password authorization - Used to 
get an access_token with the user's password and username - The function parameter should be a dictionary with next structure: - data = { - 'client_id': '', - 'client_secret': '', - 'username': '', - 'password': '', - 'scope': '' - } - """ - client_id = data['client_id'] - client_secret = data['client_secret'] - params = { - 'grant_type': 'password', - 'client_id': client_id, - 'username': data['username'], - 'password': data['password'], - 'scope': data['scopes'] - } - credentials = get_credentials(client_id, client_secret) - headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': 'Basic %s' % credentials - } - return api_post_request(TOKEN_URL, data=params, headers=headers) - - -def oauth_refresh_access_token(data): - """ - refresh an access token. Returns dictionary with new access_token. - data['access-token'] - The function parameter should be a dictionary with next structure: - data = { - 'refresh_token': '', - 'client_secret': '', - 'client_id': '' - } - """ - refresh_token = data['refresh_token'] - client_id = data['client_id'] - client_secret = data['client_secret'] - params = { - 'grant_type': 'refresh_token', - 'client_id': client_id, - 'client_secret': client_secret, - 'refresh_token': refresh_token - } - headers = {'Content-Type': 'application/x-www-form-urlencoded'} - return api_post_request(TOKEN_URL, data=params, headers=headers) - - -def oauth_client_authorization(data): - """ - OAuth2 client authorization. 
- Used to get an access_token with the oauth client credentials - The function parameter should be a dictionary with next structure: - data = { - 'client_secret': '', - 'client_id': '' - 'scopes': '', - } - """ - client_id = data['client_id'] - client_secret = data['client_secret'] - params = { - 'grant_type': 'client_credentials', - 'client_id': client_id, - 'scope': data['scopes'] - } - credentials = get_credentials(client_id, client_secret) - headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - 'Authorization': 'Basic %s' % credentials - } - return api_post_request(TOKEN_URL, data=params, headers=headers) - - -class OAuthServerAuthorisation(object): - """ - OAuth2 server authorization. - Used to get an access_token with the web authentication - """ - - def __init__(self, data): - """ - The constructor parameter should be a dictionary with next structure: - data = { - 'client_secret': '', - 'client_id': '' - 'scopes': '', - 'redirect_uri': '', - } - """ - self.client_id = data['client_id'] - self.client_secret = data['client_secret'] - self.scopes = data['scopes'] - self.redirect_uri = data.get('redirect_uri') - self.language = data.get('language', DEFAULT_LANGUAGE) - self.state = None - - def get_authorize_url(self): - """ - Get an url that client should be redirected to pass - the authentication - """ - self.state = uuid.uuid4().get_hex() - params = { - 'client_id': self.client_id, - 'response_type': 'code', - 'state': self.state, - 'scope': self.scopes, - 'redirect_uri': self.redirect_uri - } - return "%s?%s" % (AUTHORIZE_URL, urllib.urlencode(params)) - - def get_access_token(self, url): - """ - Get access token request. 
- The URL parameter is a URL to which the client was redirected - after authentication - """ - url_params = dict(urlparse.parse_qsl(urlparse.urlparse(url).query)) - state = url_params.get('state') - if not state or state != self.state: - raise ApiException('Wrong or absent the state parameter.') - if 'error' in url_params: - raise ApiException(url_params['error']) - if 'code' not in url_params: - raise ApiException( - 'Invalid response. The authorization code is absent.') - # go to get access token - params = { - 'grant_type': 'authorization_code', - 'client_id': self.client_id, - 'client_secret': self.client_secret, - 'code': url_params['code'], - 'redirect_uri': self.redirect_uri - } - headers = {'Content-Type': 'application/x-www-form-urlencoded'} - response = api_post_request(TOKEN_URL, data=params, headers=headers) - if 'access_token' not in response: - raise ApiException('Invalid response. The access_token is absent.') - return response - - -class HttpTransportPagination(object): - - DEFAULT_LIMIT = 20 - DEFAULT_OFFSET = 0 - - def __init__(self, **kwargs): - self.offset = self._get_pagination_offset(**kwargs) - self.limit = self._get_pagination_limit(**kwargs) - - @staticmethod - def _check_pagination_value(value, maximum=None, minimum=None): - try: - value = int(value) - except (ValueError, TypeError): - return - if value < 0: - return - if maximum is not None and value > maximum: - return - if minimum is not None and value < minimum: - return - return value - - def _get_pagination_limit(self, **kwargs): - if 'limit' in kwargs: - limit = self._check_pagination_value( - kwargs['limit'], MAX_PAGINATION_LIMIT, 1) - if limit is not None: - return limit - return self.DEFAULT_LIMIT - - def _get_pagination_offset(self, **kwargs): - if 'offset' in kwargs: - offset = self._check_pagination_value(kwargs['offset']) - if offset is not None: - return offset - return self.DEFAULT_OFFSET - - def to_value(self): - return {'limit': self.limit, 'offset': self.offset} - - 
class HttpTransportOrdering(object):
    """Validate an ``order_by`` keyword against a whitelist of fields.

    A leading ``-`` requests descending order and is preserved; a field
    not present in ``allowed_ordering`` is silently dropped.
    """

    ORDER_PARAMETER = 'order_by'

    def __init__(self, **kwargs):
        allowed_ordering = kwargs.get('allowed_ordering', ())
        ordering = str(kwargs.get(self.ORDER_PARAMETER, ''))
        suffix = ''
        if ordering:
            if ordering.startswith('-'):
                suffix = '-'
                ordering = ordering[1:]
            if ordering not in allowed_ordering:
                ordering = None
        self.ordering = ordering
        self.suffix = suffix

    def to_value(self):
        """Return the ordering parameter as a request-ready dict ({} if unset)."""
        if self.ordering:
            return {self.ORDER_PARAMETER: '%s%s' % (self.suffix, self.ordering)}
        return {}


class HttpTransportFiltering(object):
    """Validate filtering parameters against an ``allowed_filtering`` map.

    ``allowed_filtering`` maps a parameter name to an optional coercion
    callable; values the callable rejects (TypeError/ValueError) are
    discarded. ``to_value`` returns the surviving, de-duplicated values.
    """

    def __init__(self, **kwargs):
        self.result = {}
        allowed_filtering = kwargs.get('allowed_filtering', {}) or {}
        if not allowed_filtering:
            return
        self.allowed_filtering = allowed_filtering
        self.check_filtering(**kwargs)

    def check_value(self, val, func):
        """Coerce a single value with ``func``.

        Returns a falsy value (None) for unsupported or wrong input; when
        ``func`` is falsy the value is passed through unchanged.
        """
        if not func:
            return val
        try:
            return func(val)
        except (TypeError, ValueError):
            pass

    def check_values(self, values, func):
        # BUGFIX: the original returned ``filter(None, ...)``, which on
        # Python 3 is a lazy, always-truthy iterator; the ``if res:`` test
        # in check_filtering() then created spurious empty entries for
        # keys whose every value failed validation. Materialize the list
        # so emptiness is observable. (Falsy coerced values are dropped,
        # matching the old ``filter(None, ...)`` semantics.)
        return [checked
                for checked in (self.check_value(value, func) for value in values)
                if checked]

    def check_filtering(self, **filtering):
        for name in self.allowed_filtering:
            value = filtering.get(name)
            if value is None:
                continue
            if not isinstance(value, (tuple, list)):
                value = [value]
            func = self.allowed_filtering[name]
            valid = self.check_values(value, func)
            if valid:
                self.result.setdefault(name, []).extend(valid)

    def to_value(self):
        """Return the filtering parameters with duplicate values removed."""
        for key in self.result:
            self.result[key] = list(set(self.result[key]))
        return self.result


class HttpTransport(object):
    """Chainable builder for a single API HTTP request.

    Collects URL, method, headers, payload and pagination/ordering/
    filtering parameters, then performs the request via the module-level
    ``api_request`` helper. All ``set_*``/``update_*`` methods return
    ``self`` so calls can be chained.
    """

    SUPPORTED_METHODS = ('GET', 'POST')
    SUPPORTED_LANGUAGES = ('ru', 'en', 'de', 'pl')

    def __init__(self, access_token, method=None, user_agent=None, debug=False):
        # build_headers is a module-level helper (defined earlier in this
        # file) that prepares the Authorization/User-Agent headers.
        self._headers = build_headers(access_token, user_agent=user_agent)
        self._method = method or 'GET'
        self._data = None
        self._url = None
        self._language = None
        self._debug = debug

    def set_url(self, url, **kwargs):
        # ``url`` is an old-style format template; kwargs fill its
        # ``%(name)s`` placeholders.
        self._url = url % kwargs
        return self

    def set_language(self, language):
        """Set the Content-Language header.

        Raises AttributeError for a language outside SUPPORTED_LANGUAGES.
        """
        if language in self.SUPPORTED_LANGUAGES:
            self._language = language
            self._headers['Content-Language'] = language
        else:
            raise AttributeError(
                'This language "%s" is not supported' % language)
        return self

    def set_data(self, data):
        self._data = data
        return self

    def clean_data(self):
        self._data = None
        return self

    def update_data(self, values):
        if self._data is None:
            self._data = {}
        self._data.update(values)
        return self

    def set_pagination(self, **kwargs):
        return self.update_data(HttpTransportPagination(**kwargs).to_value())

    def set_ordering(self, **kwargs):
        return self.update_data(HttpTransportOrdering(**kwargs).to_value())

    def set_filtering(self, **kwargs):
        return self.update_data(HttpTransportFiltering(**kwargs).to_value())

    def set_method(self, method):
        """Set the HTTP method; raises AttributeError if unsupported."""
        if method in self.SUPPORTED_METHODS:
            self._method = method
        else:
            raise AttributeError(
                'This http method "%s" is not supported' % method)
        # Changing the method invalidates any previously staged payload.
        return self.clean_data()

    def set_debug(self, debug):
        self._debug = debug
        return self

    @staticmethod
    def _handle_response(response):
        # Default handler: pass the raw response through unchanged.
        return response

    @staticmethod
    def api_request(url, **kwargs):
        # Thin wrapper over the module-level api_request helper; kept as a
        # staticmethod so tests/subclasses can override it.
        return api_request(url, **kwargs)

    def request(self, **kwargs):
        """Perform the request.

        Optional kwargs: ``language``, ``url`` (plus its template values),
        ``debug``, and ``handler`` (a callable applied to the response).
        Raises AttributeError when no URL has been configured.
        """
        if 'language' in kwargs:
            self.set_language(kwargs['language'])
        if 'url' in kwargs:
            self.set_url(kwargs.pop('url'), **kwargs)
        if 'debug' in kwargs:
            self.set_debug(kwargs.pop('debug'))
        if not self._url:
            raise AttributeError(
                'Absent url parameter. Use set_url method or pass '
                'url parameter in this method.')
        requests_kwargs = {
            'method': self._method,
            'headers': self._headers,
            'data': self._data,
            'debug': self._debug
        }
        response = self.api_request(self._url, **requests_kwargs)
        return kwargs.get('handler', self._handle_response)(response)

    def __call__(self, **kwargs):
        return self.request(**kwargs)
dependency_links=[ - "git+https://github.com/trezorg/mocker.git#egg=mocker", + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.5', ], ) -