Playlist changes support: addressed comments in mopidy#236
blacklight authored and girst committed Nov 3, 2020
1 parent 95c1031 commit 5333536
Showing 1 changed file with 28 additions and 28 deletions.
mopidy_spotify/playlists.py: 56 changes (28 additions, 28 deletions)

@@ -49,32 +49,29 @@ def _get_playlist(self, uri, as_items=False):
             as_items,
         )

+    @staticmethod
+    def _get_user_and_playlist_id_from_uri(uri):
+        user_id = uri.split(':')[-3]
+        playlist_id = uri.split(':')[-1]
+        return user_id, playlist_id
+
     def _playlist_edit(self, playlist, method, **kwargs):
-        user_id = playlist.uri.split(':')[-3]
-        playlist_id = playlist.uri.split(':')[-1]
+        user_id, playlist_id = self._get_user_and_playlist_id_from_uri(playlist.uri)
         url = f'users/{user_id}/playlists/{playlist_id}/tracks'
         method = getattr(self._backend._web_client, method.lower())
         if not method:
-            self.logger.error(f'Invalid HTTP method "{method}"')
-            return playlist
+            raise AttributeError(f'Invalid HTTP method "{method}"')

         logger.debug(f'API request: {method} {url}')
-        response = method(
-            url, headers={'Content-Type': 'application/json'}, json=kwargs)
+        response = method(url, json=kwargs)

         logger.debug(f'API response: {response}')

-        if response and 'error' not in response:
-            # TODO invalidating the whole cache is probably a bit much if we have
-            # updated only one playlist - maybe we should expose an API to clear
-            # cache items by key?
-            self._backend._web_client.clear_cache()
-            return self.lookup(playlist.uri)
-        else:
-            logging.error('Error on playlist item(s) removal: {}'.format(
-                response['error'] if response else '(Unknown error)'))
-
-            return playlist
+        # TODO invalidating the whole cache is probably a bit much if we have
+        # updated only one playlist - maybe we should expose an API to clear
+        # cache items by key?
+        self._backend._web_client.clear_cache()
+        return self.lookup(playlist.uri)

     def refresh(self):
         if not self._backend._web_client.logged_in:

@@ -93,10 +90,8 @@ def refresh(self):

     def create(self, name):
         logger.info(f'Creating playlist {name}')
-        url = f'users/{user_id}/playlists'
-        response = self._backend._web_client.post(
-            url, headers={'Content-Type': 'application/json'})
-
+        url = f'users/{web_client.user_id}/playlists'
+        response = self._backend._web_client.post(url)
         return self.lookup(response['uri'])

     def delete(self, uri):

@@ -114,14 +109,12 @@ def save(self, playlist):

         new_tracks = {track.uri: track for track in playlist.tracks}
         cur_tracks = {track.uri: track for track in saved_playlist.tracks}
-        removed_uris = set([track.uri
-                            for track in saved_playlist.tracks
-                            if track.uri not in new_tracks])
+        removed_uris = set(cur_tracks.keys()).difference(set(new_tracks.keys()))

         # Remove tracks logic
         if removed_uris:
-            logger.info('Removing {} tracks from playlist {}: {}'.format(
-                len(removed_uris), playlist.name, removed_uris))
+            logger.info(f'Removing {len(removed_uris)} tracks from playlist ' +
+                        f'{saved_playlist.name}: {removed_uris}')

             cur_tracks = {
                 track.uri: track

@@ -167,8 +160,8 @@ def save(self, playlist):
                 if track.uri in cur_tracks_by_uri:
                     cur_pos = cur_tracks_by_uri[track.uri]
                     new_pos = i+1
-                    logger.info('Moving item position [{}] to [{}] in playlist {}'.
-                                format(cur_pos, new_pos, playlist.name))
+                    logger.info(f'Moving item position [{cur_pos}] to [{new_pos}] in ' +
+                                f'playlist {playlist.name}')

                     cur_tracks = {
                         track.uri: track

@@ -177,6 +170,13 @@ def save(self, playlist):
                         range_start=cur_pos, insert_before=new_pos).tracks
                     }

+        # Playlist rename logic
+        if playlist.name != saved_playlist.name:
+            logger.info(f'Renaming playlist [{saved_playlist.name}] to [{playlist.name}]')
+            user_id, playlist_id = self._get_user_and_playlist_id_from_uri(saved_playlist.uri)
+            self._backend._web_client.put(f'users/{user_id}/playlists/{playlist_id}',
+                                          json={'name': playlist.name})
+
         self._backend._web_client.clear_cache()
         return self.lookup(saved_playlist.uri)

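For reference, the new _get_user_and_playlist_id_from_uri helper relies on the position of the user and playlist segments in a user-scoped playlist URI. A minimal standalone sketch of the same split logic, using a made-up URI (the user name and playlist ID below are illustrative, not taken from the commit):

    # Standalone sketch of the helper introduced above; the example URI is invented.
    def get_user_and_playlist_id_from_uri(uri):
        user_id = uri.split(':')[-3]       # third-from-last segment holds the user
        playlist_id = uri.split(':')[-1]   # last segment holds the playlist ID
        return user_id, playlist_id

    # Assuming a URI shaped like spotify:user:<user>:playlist:<id>:
    print(get_user_and_playlist_id_from_uri('spotify:user:alice:playlist:4abc123'))
    # -> ('alice', '4abc123')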

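The TODO kept in _playlist_edit notes that clearing the entire web client cache after editing a single playlist is heavier than necessary and suggests a per-key invalidation API. A hypothetical sketch of what such a method might look like; the clear_cache signature and the _cache attribute below are assumptions for illustration, not the current mopidy-spotify web client API:

    # Hypothetical per-key cache invalidation; not part of the existing web client.
    def clear_cache(self, keys=None):
        """Drop the given cached entries, or everything when no keys are passed."""
        if keys is None:
            self._cache.clear()            # assumed dict-like cache attribute
            return
        for key in keys:
            self._cache.pop(key, None)     # silently skip keys that are not cached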