/*
	grive: an GPL program to sync a local directory with Google Drive
	Copyright (C) 2012  Wan Wai Ho

	This program is free software; you can redistribute it and/or
	modify it under the terms of the GNU General Public License
	as published by the Free Software Foundation version 2
	of the License.

	This program is distributed in the hope that it will be useful,
	but WITHOUT ANY WARRANTY; without even the implied warranty of
	MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
	GNU General Public License for more details.

	You should have received a copy of the GNU General Public License
	along with this program; if not, write to the Free Software
	Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
*/

#include "State.hh"

#include "Collection.hh"
#include "CommonUri.hh"

#include "util/Crypt.hh"
#include "util/Log.hh"
#include "protocol/Json.hh"

#include <boost/bind.hpp>
#include <boost/multi_index_container.hpp>
#include <boost/multi_index/hashed_index.hpp>
#include <boost/multi_index/identity.hpp>
#include <boost/multi_index/member.hpp>
#include <boost/multi_index/mem_fun.hpp>

#include <fstream>

namespace gr {
|
|
|
|
|
|
|
|
namespace
|
2012-05-16 18:35:22 +04:00
|
|
|
{
|
2012-05-16 20:52:17 +04:00
|
|
|
struct Resource
|
2012-05-16 18:35:22 +04:00
|
|
|
{
|
2012-05-16 20:52:17 +04:00
|
|
|
std::string id ;
|
|
|
|
fs::path path ;
|
|
|
|
std::string md5sum ;
|
|
|
|
std::time_t mtime ;
|
2012-05-16 18:35:22 +04:00
|
|
|
|
2012-05-16 20:52:17 +04:00
|
|
|
explicit Resource( const fs::path& p ) :
|
|
|
|
path( p ),
|
|
|
|
md5sum( crypt::MD5( p ) ),
|
|
|
|
mtime( fs::last_write_time( p ) )
|
2012-05-16 18:35:22 +04:00
|
|
|
{
|
|
|
|
}
|
2012-05-16 20:52:17 +04:00
|
|
|
|
|
|
|
explicit Resource( const Json& json ) :
|
|
|
|
id( json["id"].Str() ),
|
|
|
|
path( json["path"].Str() ),
|
|
|
|
md5sum( json["md5"].Str() ),
|
|
|
|
mtime( json["mtime"].Int() )
|
2012-05-16 18:35:22 +04:00
|
|
|
{
|
|
|
|
}
|
|
|
|
|
2012-05-16 20:52:17 +04:00
|
|
|
Json Get() const
|
|
|
|
{
|
|
|
|
Json entry ;
|
|
|
|
entry.Add( "id", Json( id ) ) ;
|
|
|
|
entry.Add( "path", Json( path.string() ) ) ;
|
|
|
|
entry.Add( "md5", Json( md5sum ) ) ;
|
|
|
|
entry.Add( "mtime", Json( mtime ) ) ;
|
|
|
|
return entry ;
|
|
|
|
}
|
|
|
|
} ;
|
|
|
|
|
|
|
|
struct PathHash
|
|
|
|
{
|
|
|
|
std::size_t operator()( const fs::path& p ) const
|
|
|
|
{
|
|
|
|
return boost::hash_value( p.string() ) ;
|
|
|
|
}
|
|
|
|
} ;
|
|
|
|
|
|
|
|
using namespace boost::multi_index ;
|
|
|
|
|
2012-05-17 20:37:11 +04:00
|
|
|
struct ByID {} ;
|
2012-05-16 20:52:17 +04:00
|
|
|
struct ByPath {} ;
|
|
|
|
|
|
|
|
typedef multi_index_container<
|
|
|
|
Resource,
|
|
|
|
indexed_by<
|
2012-05-17 20:37:11 +04:00
|
|
|
hashed_non_unique< tag<ByID>, member<Resource, std::string, &Resource::id> >,
|
2012-05-16 20:52:17 +04:00
|
|
|
hashed_unique< tag<ByPath>,member<Resource, fs::path, &Resource::path>, PathHash >
|
|
|
|
>
|
|
|
|
> ResourceSet ;
|
|
|
|
|
2012-05-17 20:37:11 +04:00
|
|
|
typedef ResourceSet::index<ByID>::type IDIdx ;
|
2012-05-16 20:52:17 +04:00
|
|
|
typedef ResourceSet::index<ByPath>::type PathIdx ;
|
2012-05-19 11:26:55 +04:00
|
|
|
/*
|
2012-05-17 20:37:11 +04:00
|
|
|
struct ByHref {} ;
|
|
|
|
struct ByIdentity {} ;
|
|
|
|
|
|
|
|
typedef multi_index_container<
|
|
|
|
Collection*,
|
|
|
|
indexed_by<
|
|
|
|
hashed_non_unique<tag<ByHref>, const_mem_fun<Collection, std::string, &Collection::SelfHref> >,
|
|
|
|
hashed_non_unique<tag<ByID>, const_mem_fun<Collection, std::string, &Collection::ResourceID> >,
|
|
|
|
hashed_unique<tag<ByIdentity>, identity<Collection*> >
|
|
|
|
>
|
|
|
|
> Folders ;
|
|
|
|
|
|
|
|
typedef Folders::index<ByHref>::type FoldersByHref ;
|
2012-05-19 11:26:55 +04:00
|
|
|
typedef Folders::index<ByIdentity>::type FSet ;*/
|
2012-05-16 20:52:17 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
struct State::Impl
|
|
|
|
{
|
2012-05-19 11:26:55 +04:00
|
|
|
ResourceSet rs ;
|
|
|
|
FolderSet folders ;
|
|
|
|
std::string change_stamp ;
|
2012-05-17 20:37:11 +04:00
|
|
|
|
|
|
|
std::vector<Entry> unresolved ;
|
2012-05-16 20:52:17 +04:00
|
|
|
} ;
|
|
|
|
|
|
|
|
State::State( const fs::path& filename ) :
|
|
|
|
m_impl( new Impl )
|
|
|
|
{
|
|
|
|
if ( fs::exists( filename ) )
|
|
|
|
Read( filename );
|
|
|
|
}
|
|
|
|
|
|
|
|
void State::Read( const fs::path& filename )
|
|
|
|
{
|
|
|
|
Trace( "reading %1%", filename ) ;
|
|
|
|
Json json = Json::ParseFile( filename.string() ) ;
|
|
|
|
std::vector<Json> res = json["resources"].AsArray() ;
|
|
|
|
|
|
|
|
for ( std::vector<Json>::iterator i = res.begin() ; i != res.end() ; ++i )
|
|
|
|
m_impl->rs.insert( Resource( *i ) ) ;
|
|
|
|
|
|
|
|
m_impl->change_stamp = json["change_stamp"].Str() ;
|
|
|
|
}
|
|
|
|
|
|
|
|
std::string State::ChangeStamp() const
|
|
|
|
{
|
|
|
|
return m_impl->change_stamp ;
|
|
|
|
}
|
|
|
|
|
|
|
|
void State::ChangeStamp( const std::string& cs )
|
|
|
|
{
|
|
|
|
m_impl->change_stamp = cs ;
|
|
|
|
}
|
|
|
|
|
|
|
|
void State::Sync( const fs::path& p )
|
|
|
|
{
|
2012-05-19 11:26:55 +04:00
|
|
|
// FoldersByHref& idx = m_impl->folders.get<ByHref>() ;
|
|
|
|
// FoldersByHref::iterator it = idx.find( root_href ) ;
|
|
|
|
|
|
|
|
// Collection *root = m_impl->folders.FindByHref( root_href ) ;
|
|
|
|
// assert( root != 0 ) ;
|
|
|
|
Sync( p, m_impl->folders.Root() ) ;
|
2012-05-17 20:37:11 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
void State::Sync( const fs::path& p, Collection *folder )
|
|
|
|
{
|
2012-05-19 11:26:55 +04:00
|
|
|
assert( folder != 0 ) ;
|
|
|
|
|
2012-05-17 20:37:11 +04:00
|
|
|
// Trace( "synchronizing = %1%", p ) ;
|
2012-05-16 20:52:17 +04:00
|
|
|
for ( fs::directory_iterator i( p ) ; i != fs::directory_iterator() ; ++i )
|
|
|
|
{
|
2012-05-17 20:37:11 +04:00
|
|
|
// Trace( "file found = %2% (%1%)", i->path(), i->path().filename() ) ;
|
2012-05-16 20:52:17 +04:00
|
|
|
if ( fs::is_directory( i->path() ) )
|
2012-05-17 20:37:11 +04:00
|
|
|
{
|
|
|
|
Collection *c = new Collection( i->path().filename().string(), "" ) ;
|
|
|
|
folder->AddChild( c ) ;
|
|
|
|
|
|
|
|
Sync( *i, c ) ;
|
|
|
|
}
|
2012-05-16 20:52:17 +04:00
|
|
|
else if ( i->path().filename().string()[0] != '.' )
|
|
|
|
m_impl->rs.insert( Resource( i->path() ) ) ;
|
2012-05-16 18:35:22 +04:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2012-05-16 20:52:17 +04:00
|
|
|
void State::Write( const fs::path& filename ) const
|
|
|
|
{
|
|
|
|
Json result ;
|
|
|
|
result.Add( "change_stamp", Json( m_impl->change_stamp ) ) ;
|
|
|
|
|
2012-05-17 20:37:11 +04:00
|
|
|
IDIdx& idx = m_impl->rs.get<ByID>() ;
|
2012-05-16 20:52:17 +04:00
|
|
|
|
|
|
|
std::vector<Json> res ;
|
|
|
|
std::transform( idx.begin(), idx.end(),
|
|
|
|
std::back_inserter(res),
|
|
|
|
boost::bind( &Resource::Get, _1 ) ) ;
|
|
|
|
|
|
|
|
result.Add( "resources", Json(res) ) ;
|
|
|
|
|
2012-05-17 20:37:11 +04:00
|
|
|
// Trace( "%1%", result ) ;
|
2012-05-16 20:52:17 +04:00
|
|
|
|
|
|
|
std::ofstream fs( filename.string().c_str() ) ;
|
|
|
|
fs << result ;
|
|
|
|
}
|
|
|
|
|
2012-05-16 21:20:02 +04:00
|
|
|
void State::SetId( const fs::path& p, const std::string& id )
|
2012-05-16 18:35:22 +04:00
|
|
|
{
|
2012-05-16 21:20:02 +04:00
|
|
|
PathIdx& pidx = m_impl->rs.get<ByPath>() ;
|
|
|
|
PathIdx::iterator it = pidx.find( p ) ;
|
|
|
|
if ( it != pidx.end() )
|
|
|
|
{
|
|
|
|
Resource r = *it ;
|
|
|
|
r.id = id ;
|
|
|
|
pidx.replace( it, r ) ;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
{
|
|
|
|
Trace( "can't find %1%", p ) ;
|
|
|
|
}
|
2012-05-16 18:35:22 +04:00
|
|
|
}
|
|
|
|
|
2012-05-17 20:37:11 +04:00
|
|
|
void State::OnEntry( const Entry& e )
|
|
|
|
{
|
|
|
|
if ( !Update( e ) )
|
|
|
|
{
|
|
|
|
Trace( "can't find parent of %1%", e.Title() ) ;
|
|
|
|
m_impl->unresolved.push_back( e ) ;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void State::ResolveEntry()
|
|
|
|
{
|
|
|
|
Trace( "trying to resolve %1% entries", m_impl->unresolved.size() ) ;
|
|
|
|
while ( !m_impl->unresolved.empty() )
|
|
|
|
{
|
|
|
|
if ( TryResolveEntry() == 0 )
|
|
|
|
{
|
|
|
|
Trace( "cannot make progress" ) ;
|
|
|
|
break ;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Trace( "entries left = %1%", m_impl->unresolved.size() ) ;
|
|
|
|
}
|
|
|
|
|
|
|
|
std::size_t State::TryResolveEntry()
|
|
|
|
{
|
|
|
|
assert( !m_impl->unresolved.empty() ) ;
|
|
|
|
|
|
|
|
std::size_t count = 0 ;
|
|
|
|
std::vector<Entry>& en = m_impl->unresolved ;
|
|
|
|
|
|
|
|
for ( std::vector<Entry>::iterator i = en.begin() ; i != en.end() ; )
|
|
|
|
{
|
|
|
|
if ( Update( *i ) )
|
|
|
|
{
|
|
|
|
i = en.erase( i ) ;
|
|
|
|
count++ ;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
++i ;
|
|
|
|
}
|
|
|
|
return count ;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool State::Update( const Entry& e )
|
|
|
|
{
|
2012-05-19 11:26:55 +04:00
|
|
|
// FoldersByHref& folders = m_impl->folders.get<ByHref>() ;
|
|
|
|
// FoldersByHref::iterator i = folders.find( e.ParentHref() ) ;
|
|
|
|
Collection *parent = m_impl->folders.FindByHref( e.ParentHref() ) ;
|
|
|
|
if ( parent != 0 )
|
2012-05-17 20:37:11 +04:00
|
|
|
{
|
2012-05-19 11:26:55 +04:00
|
|
|
Trace( "found parent of folder %1%: %2%", e.Title(), parent->Title() ) ;
|
2012-05-17 20:37:11 +04:00
|
|
|
|
|
|
|
// see if the entry already exist in local
|
2012-05-19 11:26:55 +04:00
|
|
|
Collection *child = parent->FindChild( e.Title() ) ;
|
2012-05-17 20:37:11 +04:00
|
|
|
if ( child != 0 )
|
|
|
|
{
|
|
|
|
// since we are updating the ID and Href, we need to remove it and re-add it.
|
2012-05-19 11:26:55 +04:00
|
|
|
m_impl->folders.Update( child, e ) ;
|
|
|
|
// FSet& fs = m_impl->folders.get<ByIdentity>() ;
|
|
|
|
// FSet::iterator c = fs.find( child ) ;
|
|
|
|
//
|
|
|
|
// if ( c != fs.end() )
|
|
|
|
// fs.erase( c ) ;
|
|
|
|
//
|
|
|
|
// child->Update( e ) ;
|
|
|
|
// folders.insert( child ) ;
|
2012-05-17 20:37:11 +04:00
|
|
|
}
|
|
|
|
|
|
|
|
// folder entry exist in google drive, but not local.
|
|
|
|
else
|
|
|
|
{
|
|
|
|
child = new Collection( e ) ;
|
2012-05-19 11:26:55 +04:00
|
|
|
parent->AddChild( child ) ;
|
|
|
|
m_impl->folders.Insert( child ) ;
|
2012-05-17 20:37:11 +04:00
|
|
|
}
|
|
|
|
return true ;
|
|
|
|
}
|
|
|
|
else
|
|
|
|
return false ;
|
|
|
|
}
|
|
|
|
|
|
|
|
Collection* State::FindFolderByHref( const std::string& href )
|
|
|
|
{
|
2012-05-19 11:26:55 +04:00
|
|
|
// FoldersByHref& folders = m_impl->folders.get<ByHref>() ;
|
|
|
|
// FoldersByHref::iterator i = folders.find( href ) ;
|
|
|
|
// return i != folders.end() ? *i : 0 ;
|
|
|
|
return m_impl->folders.FindByHref( href ) ;
|
2012-05-17 20:37:11 +04:00
|
|
|
}
|
2012-05-16 21:20:02 +04:00
|
|
|
|
2012-05-16 18:35:22 +04:00
|
|
|
} // end of namespace
|