#
#
# patch "dates.cc"
#  from [b48dd8e975a4ecff8d6fdd8e4a3ffb2038baecbb]
#    to [15ef3f121022c56cf6bdd4450f93677993c7e0ec]
#
# patch "rcs_import.cc"
#  from [94f78542270de13a70bcb7c7e061734c291b3aa9]
#    to [ca78c4c78271d4d02e47cde0b4a782ddffec59ab]
#
============================================================
--- dates.cc	b48dd8e975a4ecff8d6fdd8e4a3ffb2038baecbb
+++ dates.cc	15ef3f121022c56cf6bdd4450f93677993c7e0ec
@@ -426,13 +426,6 @@ date_t::operator -=(u64 const & other)
   return *this;
 }
 
-u64
-date_t::as_unix_epoch() const
-{
-  return 100;
-}
-
-
 #ifdef BUILD_UNIT_TESTS
 
 #include "unit_tests.hh"
============================================================
--- rcs_import.cc	94f78542270de13a70bcb7c7e061734c291b3aa9
+++ rcs_import.cc	ca78c4c78271d4d02e47cde0b4a782ddffec59ab
@@ -535,14 +535,13 @@ cvs_history
   bool deps_sorted;
 
   // the upper limit of what to import
-  time_t upper_time_limit;
+  date_t upper_time_limit;
 
   cvs_history(project_t & project)
     : project(project),
       unnamed_branch_counter(0),
       step_no(0),
-      deps_sorted(false),
-      upper_time_limit(0)
+      deps_sorted(false)
     { };
 
   void set_filename(string const & file,
@@ -948,7 +947,7 @@ log_path(cvs_history & cvs, const string
   for (T i = begin; i != end; ++i)
     L(FL("  blob: %d\n    %s\n    height:%d, size:%d\n    %s")
       % *i
-      % date_t::from_unix_epoch(cvs.blobs[*i].get_avg_time() / 100)
+      % date_t(cvs.blobs[*i].get_avg_time() / 100)
       % cvs.blobs[*i].height
       % cvs.blobs[*i].get_events().size()
       % get_event_repr(cvs, *cvs.blobs[*i].begin()));
@@ -1666,8 +1665,9 @@ process_rcs_branch(lua_hooks & lua, data
           // Check if we are still within our time limit, if there is one.
           // Note that we add six months worth of events, for which we do
           // additional processing, but don't use the data afterwards.
-          if ((cvs.upper_time_limit > 0) &&
-              (commit_time >= cvs.upper_time_limit + (60 * 60 * 24 * 30 * 6)))
+          if (cvs.upper_time_limit.valid() &&
+              ((date_t(commit_time - (60 * 60 * 24 * 30 * 6)) >
+                cvs.upper_time_limit)))
             {
               commits_over_time++;
               if (commits_over_time > 1)
@@ -3528,7 +3528,7 @@ split_cycle(cvs_history & cvs, vector<cv
-          % date_t::from_unix_epoch((*ity)->adj_time / 100)
+          % date_t((*ity)->adj_time / 100)
           % deps_from
           % deps_to);
 
       if (deps_from == 0)
@@ -3905,7 +3905,7 @@ split_cycle(cvs_history & cvs, vector<cv
       vector<cvs_blob_index>::iterator i = largest_gap_candidates.begin();
@@ -4687,7 +4687,7 @@ import_cvs_repo(options & opts,
   N(opts.branchname() != "",
     F("need base --branch argument for importing"));
 
   if (opts.until_given)
-    cvs.upper_time_limit = opts.until.as_unix_epoch();
+    cvs.upper_time_limit = opts.until;
 
   // add the trunk branch name
   string bn = opts.branchname();
@@ -5390,7 +5390,7 @@ blob_consumer::create_artificial_revisio
                           new_rid,
                           branch_name(bn),
                           utf8(changelog),
-                          date_t::from_unix_epoch(commit_time),
+                          date_t(commit_time),
                           author);
 
   ++n_revisions;
@@ -5509,7 +5509,7 @@ blob_consumer::operator()(cvs_blob_index
                           new_rid,
                           branch_name(bn),
                           utf8(changelog),
-                          date_t::from_unix_epoch(commit_time),
+                          date_t(commit_time),
                           author);
 
   // add the RCS information
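
For reference, the patch removes the stubbed date_t::as_unix_epoch()
(which always returned a constant 100) and stops round-tripping dates
through raw unix epoch integers: cvs_history::upper_time_limit becomes
a date_t, an unset --until limit is detected via valid() instead of the
old 0 sentinel, and date_t values are built directly from epoch values
in place of the date_t::from_unix_epoch() factory. Below is a minimal
sketch of the date_t interface the patch relies on; the member layout
and bodies are assumptions for illustration only, the real definitions
live in dates.hh and dates.cc:

  // Sketch only: member layout and bodies are assumed here, not
  // taken from the actual monotone sources.
  typedef unsigned long long u64;

  struct date_t
  {
    // a default-constructed date is invalid, which is why cvs_history
    // no longer needs the time_t(0) sentinel for "no limit given"
    date_t() : d(0), ok(false) {}

    // direct construction from a unix epoch value, replacing the
    // removed date_t::from_unix_epoch() factory
    explicit date_t(u64 unix_epoch) : d(unix_epoch), ok(true) {}

    bool valid() const { return ok; }

    // ordering used by the six-month look-ahead check in
    // process_rcs_branch()
    bool operator>(date_t const & other) const { return d > other.d; }

  private:
    u64 d;
    bool ok;
  };

Using an invalid default date rather than a numeric sentinel lets the
import limit check read as a question about the value itself (valid())
instead of a magic comparison against zero.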