#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Script to upload a csv file to the Xibo dataset and replace
# Imports
import os
from xml.dom import minidom
import XiboAPI
# Create the API client used by every call below.
# NOTE(review): presumably XiboAPI picks up server URL/credentials from its
# own configuration — confirm against the XiboAPI module.
api = XiboAPI.XiboAPI()
#################################
# START LIBRARY FUNCTIONS
#################################
def uploadFile(api,path,chunksize=256000,fileid='0',offset=0,size=0):
# Uploads file at path (full path)
# Returns a file upload ID which can be used to add the media
# to the library
#
# eg uploadFile(api,'/home/user/1.jpg')
#
# Optional Parameters:
# chunksize - Int: How many bytes to upload at each pass
# fileid - String: The fileUploadID to use. Useful if you're resuming an interupted upload.
# offset - Long: How many bytes to skip over before beginning upload. Useful for resuming an interupted upload.
# size - Long: How many bytes to upload. 0 = the whole file. Useful to break an upload part way through for testing.
# Must be smaller than the size of the file in bytes.
# First check the test file exists
if not os.path.isfile(path):
print "Error: File does not exist %s " % path
exit(1)
checksum = ''
data = ''
if size == 0:
size = os.path.getsize(path)
chunkOK = False
# If the file is smaller than the chunksize, send one chunk
# of the correct size.
if chunksize > size:
chunksize = size
# Open the file for binary read
fh = open(path,'rb')
while offset < size:
attempts = 0
chunkOK = False
while attempts < 3 and not chunkOK:
attempts += 1
# Read chunksize bytes of the file
fh.seek(offset)
data = fh.read(chunksize)
# Data needs to be base64 encoded to be sent
data, checksum = api.b64encode(data)
params = [('fileId',fileid),
('offset',offset),
('checksum',checksum),
('payload',data)
]
response, status, code, message, content = api.callMethod('LibraryMediaFileUpload', params)
# If the chunk went up OK then move to the next chunk
if status == 'OK':
chunkOK = True
else:
print 'Uploading chunk failed. Error %s: %s' % (code,message)
if not chunkOK:
# We did three tries and the chunk still failed.
print 'Uploading chunk failed after three attempts. File: %s Id: %s Offset: %s Attempt: %s' % (path,fileid,offset,attempts)
exit(1)
# Store the fileID so we can reuse it
fileid = api.parseID(content,'file','id')
# Get the offset the server has already (to support resuming uploads)
offset = api.parseID(content,'file','offset')
# Make sure we don't upload past the end of the file!
if offset + chunksize > size:
chunksize = size - offset
# All chunks uploaded
# Close the file handle
fh.close()
return fileid
def layoutRegionMediaDelete(api,layoutid,regionid,mediaid,lkid):
    # Remove a media item from a region on a layout.
    #
    # Parameters:
    #   api      - XiboAPI client
    #   layoutid - layout ID (coerced to int for the API call)
    #   regionid - region ID (passed through as-is)
    #   mediaid  - media ID (passed through as-is)
    #   lkid     - link ID (coerced to int for the API call)
    #
    # Returns the (response, status, code, message, content) tuple from the
    # API call so callers can check for failure (previously the result was
    # silently discarded).
    params = [('mediaId',mediaid),
              ('regionId',regionid),
              ('layoutId',int(layoutid)),
              ('lkId',int(lkid))
             ]
    return api.callMethod('LayoutRegionMediaDelete', params)
def libraryMediaDelete(api,mediaid):
    # Delete a media item from the Xibo library.
    #
    # Parameters:
    #   api     - XiboAPI client
    #   mediaid - media ID (passed through as-is)
    #
    # Returns the (response, status, code, message, content) tuple from the
    # API call so callers can check for failure (previously the result was
    # silently discarded).
    params = [('mediaId',mediaid)]
    return api.callMethod('LibraryMediaDelete', params)
#################################
# END LIBRARY FUNCTIONS
#################################
fileToUpload = 't.csv'
mediaName = 'Test CSV'
dataSetId = 10
uploadId = uploadFile(api,fileToUpload)
spreadSheetMapping = '{"0":"34","1":"35"}'
print "upload ID %d" % uploadId
params = [('dataSetId',dataSetId),('fileId',uploadId),('spreadSheetMapping',spreadSheetMapping),('overwrite',1),('ignoreFirstRow',0)]
response, status, code, message, content = api.callMethod('DataSetImportCsv', params)
print message
print content