From 4f3bb0c93094a3af13db4945c04a6a3020095e5d Mon Sep 17 00:00:00 2001
From: Kirill Smelkov <kirr@nexedi.com>
Date: Wed, 11 Mar 2015 15:43:50 +0300
Subject: [PATCH] Revert "BigFile: Fixes, Tests and on-server Append support"

This reverts commit 193f5cddf923b5a00b176b32db76d44c00c97e82, reversing
changes made to 4ee61a23d00615d1bb5e0168264620775d0bc233.

Jean-Paul suggested that we should first further review our code review /
merging procedures, using e.g. this particular merge request as a basis for that.

Thus I'm reverting this merge, so people could study and re-merge it
"properly".

~~~~

Please note: I could potentially rewrite master history so that there
would be no merge commit at all, since e.g. my branch had never been
merged before. However, in contrast to draft branches, it is not good
practice to rebase, and thus rewrite, master history - what has been
committed is committed and we only continue.

So later, to re-merge my branch (if it has not changed), we'll need to
first revert this revert (see [1] for the rationale); alternatively, I
could re-prepare my patches with different sha1 ids so that they merge
again the "usual way".

[1] http://git-scm.com/docs/howto/revert-a-faulty-merge.html
---
 .../BigFileModule_viewBigFileList.xml         |   2 +-
 .../erp5_big_file/BigFile_view.xml            |   2 +-
 .../test.erp5.testBigFile.py                  | 359 ------------------
 .../test.erp5.testBigFile.xml                 | 123 ------
 bt5/erp5_big_file/bt/change_log               |   1 -
 bt5/erp5_big_file/bt/template_test_id_list    |   1 -
 bt5/erp5_big_file/bt/test_dependency_list     |   1 -
 bt5/erp5_big_file/bt/version                  |   2 +-
 product/ERP5/Document/BigFile.py              | 105 ++---
 9 files changed, 39 insertions(+), 557 deletions(-)
 delete mode 100644 bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.py
 delete mode 100644 bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.xml
 delete mode 100644 bt5/erp5_big_file/bt/change_log
 delete mode 100644 bt5/erp5_big_file/bt/template_test_id_list
 delete mode 100644 bt5/erp5_big_file/bt/test_dependency_list

diff --git a/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFileModule_viewBigFileList.xml b/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFileModule_viewBigFileList.xml
index 4369803df7..ed3a9e9f0e 100644
--- a/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFileModule_viewBigFileList.xml
+++ b/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFileModule_viewBigFileList.xml
@@ -2,7 +2,7 @@
 <ZopeData>
   <record id="1" aka="AAAAAAAAAAE=">
     <pickle>
-      <global name="ERP5 Form" module="erp5.portal_type"/>
+      <global name="ERP5Form" module="Products.ERP5Form.Form"/>
     </pickle>
     <pickle>
       <dictionary>
diff --git a/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFile_view.xml b/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFile_view.xml
index 7a250a47b8..6ff852500f 100644
--- a/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFile_view.xml
+++ b/bt5/erp5_big_file/SkinTemplateItem/portal_skins/erp5_big_file/BigFile_view.xml
@@ -2,7 +2,7 @@
 <ZopeData>
   <record id="1" aka="AAAAAAAAAAE=">
     <pickle>
-      <global name="ERP5 Form" module="erp5.portal_type"/>
+      <global name="ERP5Form" module="Products.ERP5Form.Form"/>
     </pickle>
     <pickle>
       <dictionary>
diff --git a/bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.py b/bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.py
deleted file mode 100644
index 62372ef7b0..0000000000
--- a/bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.py
+++ /dev/null
@@ -1,359 +0,0 @@
-# ERP5.Document | Tests for BigFile
-#
-# Copyright (C) 2015 Nexedi SA and Contributors. All Rights Reserved.
-#               Kirill Smelkov <kirr@nexedi.com>
-#
-# WARNING: This program as such is intended to be used by professional
-# programmers who take the whole responsability of assessing all potential
-# consequences resulting from its eventual inadequacies and bugs
-# End users who are looking for a ready-to-use solution with commercial
-# garantees and support are strongly adviced to contract a Free Software
-# Service Company
-#
-# This program is Free Software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License
-# as published by the Free Software Foundation; either version 2
-# of the License, or (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
-from cStringIO import StringIO
-
-from ZPublisher.HTTPRequest import HTTPRequest
-from ZPublisher.HTTPResponse import HTTPResponse
-from Products.ERP5Type.tests.ERP5TypeTestCase import ERP5TypeTestCase
-from Products.ERP5Type.BTreeData import BTreeData
-
-
-# like Testing.makerequest, but
-#
-#   1. always redirects stdout to stringio,
-#   2. stdin content can be specified and is processed,
-#   3. returns actual request object (not wrapped portal).
-#
-# see also: Products.CMFCore.tests.test_CookieCrumbler.makerequest()
-# TODO makerequest() is generic enough and deserves moving to testing utils
-def makerequest(environ=None, stdin=''):
-  stdout = StringIO()
-  stdin  = StringIO(stdin)
-  if environ is None:
-    environ = {}
-
-  # Header-Name -> HEADER_NAME
-  _ = {}
-  for k,v in environ.items():
-    k = k.replace('-', '_').upper()
-    _[k] = v
-  environ = _
-
-  response = HTTPResponse(stdout=stdout)
-  environ.setdefault('SERVER_NAME',    'foo')
-  environ.setdefault('SERVER_PORT',    '80')
-  request  = HTTPRequest(stdin, environ, response)
-  # process stdin data
-  request.processInputs()
-  return request
-
-# generate makerequest-like function for http method
-def request_function(method_name):
-  method_name = method_name.upper()
-  def method_func(environ=None, stdin=''):
-    if environ is None:
-      environ = {}
-    environ.setdefault('REQUEST_METHOD', method_name)
-    return makerequest(environ, stdin)
-  method_func.func_name = method_name
-  return method_func
-
-# requests
-get = request_function('GET')
-put = request_function('PUT')
-
-
-# FIXME Zope translates 308 to 500
-# https://github.com/zopefoundation/Zope/blob/2.13/src/ZPublisher/HTTPResponse.py#L223
-# https://github.com/zopefoundation/Zope/blob/2.13/src/ZPublisher/HTTPResponse.py#L64
-R308 = 500
-
-
-
-class TestBigFile(ERP5TypeTestCase):
-  """Tests for ERP5.Document.BigFile"""
-
-  def getBusinessTemplateList(self):
-    """bt5 required to run this tests"""
-    return ('erp5_big_file',
-               # test runner does not automatically install bt5 dependencies -
-               # - next erp5_big_file dependencies are specified manually
-               'erp5_dms',
-                 'erp5_web',
-                 'erp5_ingestion',
-                 'erp5_base',
-               )[::-1]	# NOTE install order is important
-
-
-  # check that object (document) method corresponding to request returns
-  # result, with expected response body, status and headers
-  def checkRequest(self, document, request, kw, result, body, status, header_dict):
-    # request -> method to call
-    method_name = request['REQUEST_METHOD']
-    if method_name == 'GET':
-      method_name = 'index_html'
-    method = getattr(document, method_name)
-
-    ret = method (request, request.RESPONSE, **kw)
-    # like in ZPublisher - returned RESPONSE means empty
-    if ret is request.RESPONSE:
-      ret = ''
-    self.assertEqual(ret,       result)
-    self.assertEqual(status,    request.RESPONSE.getStatus())
-    for h,v in header_dict.items():
-      rv = request.RESPONSE.getHeader(h)
-      self.assertEqual(v, rv, '%s: %r != %r' % (h, v, rv))
-
-    # force response flush to its stdout
-    request.RESPONSE.write('')
-    # body and headers are delimited by empty line (RFC 2616, 4.1)
-    response_body = request.RESPONSE.stdout.getvalue().split('\r\n\r\n', 1)[1]
-    self.assertEqual(body, response_body)
-
-
-  # basic tests for working with BigFile via its public interface
-  def testBigFile_01_Basic(self):
-    big_file_module = self.getPortal().big_file_module
-    f = big_file_module.newContent(portal_type='Big File')
-    check = lambda *args: self.checkRequest(f, *args)
-
-
-    # after creation file is empty and get(0-0) returns 416
-    self.assertEqual(f.getSize(), 0)
-    self.assertEqual(f.getData(), '')
-
-                                             #  result body status headers
-    check(get(), {'format': 'raw'},                 '', '', 200, {'Content-Length': '0'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',R308, {                                  'Range': 'bytes 0--1'})  # XXX 0--1 ok?
-    check(get({        'Range': 'bytes=0-0'}),{},   '', '', 416, {'Content-Length': '0',    'Content-Range': 'bytes */0'})
-
-
-    # append empty chunk - the same
-    f._appendData('')
-    self.assertEqual(f.getSize(), 0)
-    self.assertEqual(f.getData(), '')
-
-    check(get(), {'format': 'raw'},                 '', '', 200, {'Content-Length': '0'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',R308, {                                  'Range': 'bytes 0--1'})  # XXX 0--1 ok?
-    check(get({        'Range': 'bytes=0-0'}),{},   '', '', 416, {                          'Content-Range': 'bytes */0'})
-
-
-    # append 1 byte - file should grow up and get(0-0) returns 206
-    f._appendData('x')
-    self.assertEqual(f.getSize(), 1)
-    self.assertEqual(f.getData(), 'x')
-
-    check(get(), {'format': 'raw'},                 '', 'x', 200, {'Content-Length': '1'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '', R308, {                                'Range': 'bytes 0-0'})
-    check(get({        'Range': 'bytes=0-0'}),{},   '', 'x', 206, {'Content-Length': '1',  'Content-Range': 'bytes 0-0/1'})
-
-
-    # append another 2 bytes and try to get whole file and partial contents
-    f._appendData('yz')
-    self.assertEqual(f.getSize(), 3)
-    self.assertEqual(f.getData(), 'xyz')
-
-    check(get(), {'format': 'raw'},                 '', 'xyz',  200, {'Content-Length': '3'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',    R308, {                                'Range': 'bytes 0-2'})
-    check(get({        'Range': 'bytes=0-0'}),{},   '', 'x'  ,  206, {'Content-Length': '1',  'Content-Range': 'bytes 0-0/3'})
-    check(get({        'Range': 'bytes=1-1'}),{},   '',  'y' ,  206, {'Content-Length': '1',  'Content-Range': 'bytes 1-1/3'})
-    check(get({        'Range': 'bytes=2-2'}),{},   '',   'z',  206, {'Content-Length': '1',  'Content-Range': 'bytes 2-2/3'})
-    check(get({        'Range': 'bytes=0-1'}),{},   '', 'xy' ,  206, {'Content-Length': '2',  'Content-Range': 'bytes 0-1/3'})
-    check(get({        'Range': 'bytes=1-2'}),{},   '',  'yz',  206, {'Content-Length': '2',  'Content-Range': 'bytes 1-2/3'})
-    check(get({        'Range': 'bytes=0-2'}),{},   '', 'xyz',  206, {'Content-Length': '3',  'Content-Range': 'bytes 0-2/3'})
-
-    # append via PUT with range
-    check(put({'Content-Range': 'bytes 3-4/5', 'Content-Length': '2'}, '01'),{},  '', '', 204, {})
-    self.assertEqual(f.getSize(), 5)
-    self.assertEqual(f.getData(), 'xyz01')
-    check(get(), {'format': 'raw'},                 '', 'xyz01',200, {'Content-Length': '5'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',    R308, {                                'Range': 'bytes 0-4'})
-    check(get({        'Range': 'bytes=0-4'}),{},   '', 'xyz01',206, {'Content-Length': '5',  'Content-Range': 'bytes 0-4/5'})
-    check(get({        'Range': 'bytes=1-3'}),{},   '',  'yz0' ,206, {'Content-Length': '3',  'Content-Range': 'bytes 1-3/5'})
-    check(get({        'Range': 'bytes=1-2'}),{},   '',  'yz'  ,206, {'Content-Length': '2',  'Content-Range': 'bytes 1-2/5'})
-    check(get({        'Range': 'bytes=2-2'}),{},   '',   'z'  ,206, {'Content-Length': '1',  'Content-Range': 'bytes 2-2/5'})
-
-    # replace whole content via PUT without range
-    # (and we won't exercise GET with range routinely further)
-    check(put({'Content-Length': '3'}, 'abc'),{},  '', '', 204, {})
-    self.assertEqual(f.getSize(), 3)
-    self.assertEqual(f.getData(), 'abc')
-    check(get(), {'format': 'raw'},                 '', 'abc',  200, {'Content-Length': '3'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',    R308, {                                'Range': 'bytes 0-2'})
-
-    # append via PUT with range (again)
-    check(put({'Content-Range': 'bytes 3-7/8', 'Content-Length': '5'}, 'defgh'),{},  '', '', 204, {})
-    self.assertEqual(f.getSize(), 8)
-    self.assertEqual(f.getData(), 'abcdefgh')
-    check(get(), {'format': 'raw'},                 '', 'abcdefgh', 200, {'Content-Length': '8'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',        R308, {                            'Range': 'bytes 0-7'})
-
-    # append via ._appendData()  (again)
-    f._appendData('ij')
-    self.assertEqual(f.getSize(), 10)
-    self.assertEqual(f.getData(), 'abcdefghij')
-    check(get(), {'format': 'raw'},                 '', 'abcdefghij', 200, {'Content-Length': '10'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',          R308, {                          'Range': 'bytes 0-9'})
-
-    # make sure PUT with incorrect/non-append range is rejected
-    check(put({'Content-Range': 'bytes 10-10/10', 'Content-Length': '1'}, 'k'),{}, '', '', 400, {'X-Explanation': 'Total size unexpected'})
-    self.assertEqual(f.getData(), 'abcdefghij')
-    check(put({'Content-Range': 'bytes 10-10/11', 'Content-Length': '2'}, 'k'),{}, '', '', 400, {'X-Explanation': 'Content length unexpected'})
-    self.assertEqual(f.getData(), 'abcdefghij')
-    check(put({'Content-Range': 'bytes 8-8/10',   'Content-Length': '1'}, '?'),{}, '', '', 400, {'X-Explanation': 'Can only append data'})
-    check(put({'Content-Range': 'bytes 9-9/10',   'Content-Length': '1'}, '?'),{}, '', '', 400, {'X-Explanation': 'Can only append data'})
-    check(put({'Content-Range': 'bytes 9-10/11',  'Content-Length': '2'},'??'),{}, '', '', 400, {'X-Explanation': 'Can only append data'})
-    check(put({'Content-Range': 'bytes 11-11/12', 'Content-Length': '1'}, '?'),{}, '', '', 400, {'X-Explanation': 'Can only append data'})
-
-    # TODO test 'If-Range' with GET
-    # TODO test multiple ranges in 'Range' with GET
-
-
-  # test BigFile's .data property can be of several types and is handled
-  # properly and we can still get data and migrate to BTreeData transparently
-  # (called from under testBigFile_02_DataVarious driver)
-  def _testBigFile_02_DataVarious(self):
-    # BigFile's .data can be:
-    # str       - because data comes from Data property sheet and default value is ''
-    # None      - because it can be changed
-    # BTreeData - because it is scalable way to work with large content
-    #
-    # str can be possibly non-empty because we could want to transparently
-    # migrate plain File documents to BigFiles.
-    #
-    # make sure BigFile correctly works in all those situations.
-
-    big_file_module = self.getPortal().big_file_module
-    f = big_file_module.newContent(portal_type='Big File')
-    check = lambda *args: self.checkRequest(f, *args)
-
-    # after creation .data is ''  (as per default from Data property sheet)
-    _ = f._baseGetData()
-    self.assertIsInstance(_, str)
-    self.assertEqual(_, '')
-
-    # make sure we can get empty content through all ways
-    # (already covered in testBigFile_01_Basic, but still)
-    self.assertEqual(f.getSize(), 0)
-    self.assertEqual(f.getData(), '')
-    check(get(), {'format': 'raw'},                 '', '', 200, {'Content-Length': '0'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',R308, {                                'Range': 'bytes 0--1'})  # XXX 0--1 ok?
-    check(get({        'Range': 'bytes=0-0'}),{},   '', '', 416, {'Content-Length': '0',  'Content-Range': 'bytes */0'})
-
-
-    # set .data to non-empty str and make sure we can get content through all ways
-    f._baseSetData('abc')
-    _ = f._baseGetData()
-    self.assertIsInstance(_, str)
-    self.assertEqual(_, 'abc')
-    self.assertEqual(f.getSize(), 3)
-    self.assertEqual(f.getData(), 'abc')
-    check(get(), {'format': 'raw'},                 'abc', '',    200, {'Content-Length': '3'})
-    check(put({'Content-Range': 'bytes */*'}),{},   ''   , '',   R308, {                                'Range': 'bytes 0-2'})
-    check(get({        'Range': 'bytes=0-2'}),{},   ''   , 'abc', 206, {'Content-Length': '3',  'Content-Range': 'bytes 0-2/3'})
-
-    # and .data should remain str after access (though later this could be
-    # changed to transparently migrate to BTreeData)
-    _ = f._baseGetData()
-    self.assertIsInstance(_, str)
-    self.assertEqual(_, 'abc')
-
-
-    # on append .data should migrate to BTreeData
-    f._appendData('d')
-    _ = f._baseGetData()
-    self.assertIsInstance(_, BTreeData)
-    self.assertEqual(f.getSize(), 4)
-    self.assertEqual(f.getData(), 'abcd')
-    check(get(), {'format': 'raw'},                 '', 'abcd', 200, {'Content-Length': '4'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',    R308, {                                'Range': 'bytes 0-3'})
-    check(get({        'Range': 'bytes=0-3'}),{},   '', 'abcd', 206, {'Content-Length': '4',  'Content-Range': 'bytes 0-3/4'})
-
-
-
-    # change .data to None and make sure we can still get empty content
-    # NOTE additionally to ._baseSetData(None), ._setData(None) also sets
-    #      .size=0 which is needed for correct BigFile bases operation.
-    #
-    #      see ERP5.Document.File._setSize() for details.
-    f._setData(None)
-    # NOTE still '' because it is default value specified in Data property
-    #      sheet for .data field
-    _ = f._baseGetData()
-    self.assertIsInstance(_, str)
-    self.assertEqual(_, '')
-    # but we can change property sheet default on the fly
-    # XXX ( only for this particular getter _baseGetData -
-    #       - because property type information is not stored in one place,
-    #       but is copied on getter/setter initialization - see Getter/Setter
-    #       in ERP5Type.Accessor.Base )
-    # NOTE this change is automatically reverted back in calling helper
-    self.assertIsInstance(f._baseGetData._default, str)
-    self.assertEqual(f._baseGetData._default, '')
-    f._baseGetData.im_func._default = None  # NOTE not possible to do on just f._baseGetData
-    self.assertIs(f._baseGetData._default, None)
-    self.assertIs(f._baseGetData(), None)   # <- oops
-
-    self.assertEqual(f.getSize(), 0)
-    self.assertIs   (f.getData(), None)
-    check(get(), {'format': 'raw'},                 '', '', 200, {'Content-Length': '0'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',R308, {                                'Range': 'bytes 0--1'})  # XXX 0--1 ok?
-    check(get({        'Range': 'bytes=0-0'}),{},   '', '', 416, {'Content-Length': '0',  'Content-Range': 'bytes */0'})
-
-
-    # on append .data should become BTreeData
-    f._appendData('x')
-    _ = f._baseGetData()
-    self.assertIsInstance(_, BTreeData)
-    self.assertEqual(f.getSize(), 1)
-    self.assertEqual(f.getData(), 'x')
-    check(get(), {'format': 'raw'},                 '', 'x',    200, {'Content-Length': '1'})
-    check(put({'Content-Range': 'bytes */*'}),{},   '', '',    R308, {                                'Range': 'bytes 0-0'})
-    check(get({        'Range': 'bytes=0-3'}),{},   '', 'x',    206, {'Content-Length': '1',  'Content-Range': 'bytes 0-0/1'})
-
-
-
-  # helper to call _testBigFile_02_DataVarious() and restore .data._default
-  def testBigFile_02_DataVarious(self):
-    big_file_module = self.getPortal().big_file_module
-    f = big_file_module.newContent(portal_type='Big File')
-
-    # Data property sheet specifies .data default to ''
-    _ = f._baseGetData()
-    self.assertIsInstance(_, str)
-    self.assertEqual(_, '')
-
-    # NOTE obtaining getter is not possible via BigFile._baseGetData
-    g = f._baseGetData.im_func
-    self.assertIsInstance(g._default, str)
-    self.assertEqual(g._default, '')
-
-    try:
-      self._testBigFile_02_DataVarious()
-
-    # restore ._baseGetData._default and make sure restoration really worked
-    finally:
-      g._default = ''
-      f._baseSetData(None)    # so that we are sure getter returns class defaults
-      _ = f._baseGetData()
-      self.assertIsInstance(_, str)
-      self.assertEqual(_, '')
-
-
-
-  # TODO write big data to file and ensure it still works
-  # TODO test streaming works in chunks
diff --git a/bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.xml b/bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.xml
deleted file mode 100644
index 70d9480508..0000000000
--- a/bt5/erp5_big_file/TestTemplateItem/portal_components/test.erp5.testBigFile.xml
+++ /dev/null
@@ -1,123 +0,0 @@
-<?xml version="1.0"?>
-<ZopeData>
-  <record id="1" aka="AAAAAAAAAAE=">
-    <pickle>
-      <global name="Test Component" module="erp5.portal_type"/>
-    </pickle>
-    <pickle>
-      <dictionary>
-        <item>
-            <key> <string>_recorded_property_dict</string> </key>
-            <value>
-              <persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
-            </value>
-        </item>
-        <item>
-            <key> <string>default_reference</string> </key>
-            <value> <string>testBigFile</string> </value>
-        </item>
-        <item>
-            <key> <string>description</string> </key>
-            <value>
-              <none/>
-            </value>
-        </item>
-        <item>
-            <key> <string>id</string> </key>
-            <value> <string>test.erp5.testBigFile</string> </value>
-        </item>
-        <item>
-            <key> <string>portal_type</string> </key>
-            <value> <string>Test Component</string> </value>
-        </item>
-        <item>
-            <key> <string>sid</string> </key>
-            <value>
-              <none/>
-            </value>
-        </item>
-        <item>
-            <key> <string>text_content_error_message</string> </key>
-            <value>
-              <tuple/>
-            </value>
-        </item>
-        <item>
-            <key> <string>text_content_warning_message</string> </key>
-            <value>
-              <tuple/>
-            </value>
-        </item>
-        <item>
-            <key> <string>version</string> </key>
-            <value> <string>erp5</string> </value>
-        </item>
-        <item>
-            <key> <string>workflow_history</string> </key>
-            <value>
-              <persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
-            </value>
-        </item>
-      </dictionary>
-    </pickle>
-  </record>
-  <record id="2" aka="AAAAAAAAAAI=">
-    <pickle>
-      <global name="PersistentMapping" module="Persistence.mapping"/>
-    </pickle>
-    <pickle>
-      <dictionary>
-        <item>
-            <key> <string>data</string> </key>
-            <value>
-              <dictionary/>
-            </value>
-        </item>
-      </dictionary>
-    </pickle>
-  </record>
-  <record id="3" aka="AAAAAAAAAAM=">
-    <pickle>
-      <global name="PersistentMapping" module="Persistence.mapping"/>
-    </pickle>
-    <pickle>
-      <dictionary>
-        <item>
-            <key> <string>data</string> </key>
-            <value>
-              <dictionary>
-                <item>
-                    <key> <string>component_validation_workflow</string> </key>
-                    <value>
-                      <persistent> <string encoding="base64">AAAAAAAAAAQ=</string> </persistent>
-                    </value>
-                </item>
-              </dictionary>
-            </value>
-        </item>
-      </dictionary>
-    </pickle>
-  </record>
-  <record id="4" aka="AAAAAAAAAAQ=">
-    <pickle>
-      <global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
-    </pickle>
-    <pickle>
-      <tuple>
-        <none/>
-        <list>
-          <dictionary>
-            <item>
-                <key> <string>action</string> </key>
-                <value> <string>validate</string> </value>
-            </item>
-            <item>
-                <key> <string>validation_state</string> </key>
-                <value> <string>validated</string> </value>
-            </item>
-          </dictionary>
-        </list>
-      </tuple>
-    </pickle>
-  </record>
-</ZopeData>
diff --git a/bt5/erp5_big_file/bt/change_log b/bt5/erp5_big_file/bt/change_log
deleted file mode 100644
index 2a98f04d20..0000000000
--- a/bt5/erp5_big_file/bt/change_log
+++ /dev/null
@@ -1 +0,0 @@
-1.1 added tests for BigFile
diff --git a/bt5/erp5_big_file/bt/template_test_id_list b/bt5/erp5_big_file/bt/template_test_id_list
deleted file mode 100644
index d8c9a1d20a..0000000000
--- a/bt5/erp5_big_file/bt/template_test_id_list
+++ /dev/null
@@ -1 +0,0 @@
-test.erp5.testBigFile
\ No newline at end of file
diff --git a/bt5/erp5_big_file/bt/test_dependency_list b/bt5/erp5_big_file/bt/test_dependency_list
deleted file mode 100644
index bff77e32d8..0000000000
--- a/bt5/erp5_big_file/bt/test_dependency_list
+++ /dev/null
@@ -1 +0,0 @@
-erp5_full_text_mroonga_catalog
\ No newline at end of file
diff --git a/bt5/erp5_big_file/bt/version b/bt5/erp5_big_file/bt/version
index b123147e2a..56a6051ca2 100644
--- a/bt5/erp5_big_file/bt/version
+++ b/bt5/erp5_big_file/bt/version
@@ -1 +1 @@
-1.1
\ No newline at end of file
+1
\ No newline at end of file
diff --git a/product/ERP5/Document/BigFile.py b/product/ERP5/Document/BigFile.py
index cab6ccf253..a1fbc8d1c4 100644
--- a/product/ERP5/Document/BigFile.py
+++ b/product/ERP5/Document/BigFile.py
@@ -24,42 +24,14 @@ from ZPublisher.HTTPRequest import FileUpload
 from ZPublisher import HTTPRangeSupport
 from webdav.common import rfc1123_date
 from mimetools import choose_boundary
-from Products.CMFCore.utils import _setCacheHeaders, _ViewEmulator
-from DateTime import DateTime
+from Products.CMFCore.utils import getToolByName, _setCacheHeaders,\
+    _ViewEmulator
 import re
 
 class BigFile(File):
   """
   Support storing huge file.
   No convertion is allowed for now.
-
-
-  NOTE BigFile maintains the following invariant:
-
-    data property is either
-
-      - BTreeData instance,  or
-      - str(*),  or
-      - None.
-
-    (*) str has to be supported because '' is a default value for `data` field
-        from Data property sheet.
-
-        Even more - for
-
-            a) compatibility reasons, and
-            b) desire to support automatic migration of File-based documents
-               from document_module to BigFiles
-
-        non-empty str for data also have to be supported.
-
-        XXX(kirr) I'm not sure supporting non-empty str is a good idea (it
-            would be simpler if .data could be either BTreeData or "empty"),
-            but neither I'm experienced enough in erp5 nor know what are
-            appropriate compatibility requirements.
-
-            We discussed with Romain and settled on "None or str or BTreeData"
-            invariant for now.
   """
 
   meta_type = 'ERP5 Big File'
@@ -126,11 +98,6 @@ class BigFile(File):
 
     if data is None:
       btree = BTreeData()
-    elif isinstance(data, str):
-      # we'll want to append content to this file -
-      # - automatically convert str (empty or not) to BTreeData
-      btree = BTreeData()
-      btree.write(data, 0)
     else:
       btree = data
     seek(0)
@@ -149,14 +116,6 @@ class BigFile(File):
       self.serialize()
     return btree, len(btree)
 
-  def _data_mtime(self):
-    """get .data mtime if present and fallback to self._p_mtime"""
-    # there is no data._p_mtime when data is None or str.
-    # so try and fallback to self._p_mtime
-    data = self._baseGetData()
-    mtime = getattr(data, '_p_mtime', self._p_mtime)
-    return mtime
-
   def _range_request_handler(self, REQUEST, RESPONSE):
     # HTTP Range header handling: return True if we've served a range
     # chunk out of our data.
@@ -188,10 +147,13 @@ class BigFile(File):
           try: mod_since=long(DateTime(date).timeTime())
           except: mod_since=None
           if mod_since is not None:
-            last_mod = self._data_mtime()
-            if last_mod is None:
-                last_mod = 0
-            last_mod = long(last_mod)
+            if data is not None:
+              last_mod = long(data._p_mtime)
+            else:
+              if self._p_mtime:
+                last_mod = long(self._p_mtime)
+              else:
+                last_mod = long(0)
             if last_mod > mod_since:
               # Modified, so send a normal response. We delete
               # the ranges, which causes us to skip to the 200
@@ -210,7 +172,10 @@ class BigFile(File):
           RESPONSE.setHeader('Content-Range',
               'bytes */%d' % self.getSize())
           RESPONSE.setHeader('Accept-Ranges', 'bytes')
-          RESPONSE.setHeader('Last-Modified', rfc1123_date(self._data_mtime()))
+          if data is not None:
+            RESPONSE.setHeader('Last-Modified', rfc1123_date(data._p_mtime))
+          else:
+            RESPONSE.setHeader('Last-Modified', rfc1123_date(self._p_mtime))
           RESPONSE.setHeader('Content-Type', self.content_type)
           RESPONSE.setHeader('Content-Length', self.getSize())
           RESPONSE.setStatus(416)
@@ -223,7 +188,10 @@ class BigFile(File):
           start, end = ranges[0]
           size = end - start
 
-          RESPONSE.setHeader('Last-Modified', rfc1123_date(self._data_mtime()))
+          if data is not None:
+            RESPONSE.setHeader('Last-Modified', rfc1123_date(data._p_mtime))
+          else:
+            RESPONSE.setHeader('Last-Modified', rfc1123_date(self._p_mtime))
           RESPONSE.setHeader('Content-Type', self.content_type)
           RESPONSE.setHeader('Content-Length', size)
           RESPONSE.setHeader('Accept-Ranges', 'bytes')
@@ -231,7 +199,6 @@ class BigFile(File):
               'bytes %d-%d/%d' % (start, end - 1, self.getSize()))
           RESPONSE.setStatus(206) # Partial content
 
-          # NOTE data cannot be None here (if it is - ranges are not satisfiable)
           if isinstance(data, str):
             RESPONSE.write(data[start:end])
             return True
@@ -260,7 +227,10 @@ class BigFile(File):
 
           RESPONSE.setHeader('Content-Length', size)
           RESPONSE.setHeader('Accept-Ranges', 'bytes')
-          RESPONSE.setHeader('Last-Modified', rfc1123_date(self._data_mtime()))
+          if data is not None:
+            RESPONSE.setHeader('Last-Modified', rfc1123_date(data._p_mtime))
+          else:
+            RESPONSE.setHeader('Last-Modified', rfc1123_date(self._p_mtime))
           RESPONSE.setHeader('Content-Type',
               'multipart/%sbyteranges; boundary=%s' % (
                   draftprefix, boundary))
@@ -274,7 +244,6 @@ class BigFile(File):
                 'Content-Range: bytes %d-%d/%d\r\n\r\n' % (
                     start, end - 1, self.getSize()))
 
-            # NOTE data cannot be None here (if it is - ranges are not satisfiable)
             if isinstance(data, str):
               RESPONSE.write(data[start:end])
 
@@ -311,7 +280,7 @@ class BigFile(File):
     data = self._baseGetData()
     mime = self.getContentType()
 
-    RESPONSE.setHeader('Content-Length', data is not None and  len(data)  or  0)
+    RESPONSE.setHeader('Content-Length', len(data))
     RESPONSE.setHeader('Content-Type', mime)
     if inline is _MARKER:
       # by default, use inline for text and image formats
@@ -344,8 +313,7 @@ class BigFile(File):
 
     content_range = REQUEST.get_header('Content-Range', None)
     if content_range is None:
-      # truncate the file
-      self._baseSetData(None)
+      btree = None
     else:
       current_size = int(self.getSize())
       query_range = re.compile('bytes \*/\*')
@@ -353,6 +321,8 @@ class BigFile(File):
                                 '(?P<last_byte>[0-9]+)/' \
                                 '(?P<total_content_length>[0-9]+)')
       if query_range.match(content_range):
+        data = self._baseGetData()
+
         RESPONSE.setHeader('X-Explanation', 'Resume incomplete')
         RESPONSE.setHeader('Range', 'bytes 0-%s' % (current_size-1))
         RESPONSE.setStatus(308)
@@ -379,28 +349,25 @@ class BigFile(File):
           RESPONSE.setStatus(400)
           return RESPONSE
 
+        else:
+
+          btree = self._baseGetData()
+          if btree is None:
+            btree = BTreeData()
+
       else:
         RESPONSE.setHeader('X-Explanation', 'Can not parse range')
         RESPONSE.setStatus(400) # Partial content
         return RESPONSE
 
-    self._appendData(file, content_type=type)
+    data, size = self._read_data(file, data=btree)
 
-    RESPONSE.setStatus(204)
-    return RESPONSE
-
-
-  def _appendData(self, data_chunk, content_type=None):
-    """append data chunk to the end of the file
-
-       NOTE if content_type is specified, it will change content_type for the
-            whole file.
-    """
-    data, size = self._read_data(data_chunk, data=self._baseGetData())
-    content_type=self._get_content_type(data_chunk, data, self.__name__,
-                                        content_type or self.content_type)
+    content_type=self._get_content_type(file, data, self.__name__,
+                                        type or self.content_type)
     self.update_data(data, content_type, size)
 
+    RESPONSE.setStatus(204)
+    return RESPONSE
 
 # CMFFile also brings the IContentishInterface on CMF 2.2, remove it.
 removeIContentishInterface(BigFile)
-- 
2.30.9