mirror of
https://github.com/LadybirdBrowser/ladybird.git
synced 2025-06-07 21:17:07 +09:00
LibDiff: Remove a bunch of unused code
We have no need to support normal or context diffs in Ladybird, nor do we need the ability to parse and apply patches.
This commit is contained in:
parent
03e34db9f8
commit
41b060be81
Notes:
github-actions[bot]
2025-05-13 10:47:39 +00:00
Author: https://github.com/shannonbooth
Commit: 41b060be81
Pull-request: https://github.com/LadybirdBrowser/ladybird/pull/4717
8 changed files with 0 additions and 657 deletions
|
@ -1,218 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2023-2024, Shannon Booth <shannon@serenityos.org>
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
#include <AK/Stream.h>
|
||||
#include <LibDiff/Applier.h>
|
||||
#include <LibDiff/Hunks.h>
|
||||
|
||||
namespace Diff {
|
||||
|
||||
// Returns the 1-based line number in the old file where this hunk is expected to start.
static size_t expected_line_number(HunkLocation const& location)
{
    // NOTE: A start line of 0 means the old file has no content (e.g. a range such as
    // '@@ -0,0 +1,3 @@' when adding a file), so the hunk effectively begins at line 1.
    auto expected = location.old_range.start_line == 0 ? 1 : location.old_range.start_line;

    VERIFY(expected != 0);

    return expected;
}
|
||||
|
||||
// Result of searching for the position in the old file where a hunk applies.
struct Location {
    // 0-based index into the old file's lines where the hunk matched.
    size_t line_number;
    // Number of context lines that had to be ignored to find the match (0 == exact match).
    size_t fuzz { 0 };
    // How far the match landed from the hunk's stated position; carried forward
    // by the caller as a hint for locating subsequent hunks.
    ssize_t offset { 0 };
};
|
||||
|
||||
// Searches 'content' (the old file's lines) for the position where 'hunk' applies.
//
// 'offset' is the accumulated drift from previously-applied hunks and is used as the
// starting hint. If no exact match is found, up to 'max_fuzz' leading/trailing context
// lines are ignored before giving up (the "fuzz factor", as in patch(1)).
// Returns an empty Optional if the hunk cannot be located at all.
static Optional<Location> locate_hunk(Vector<StringView> const& content, Hunk const& hunk, ssize_t offset, size_t max_fuzz = 3)
{
    // Make a first best guess at where the from-file range is telling us where the hunk should be.
    size_t offset_guess = expected_line_number(hunk.location) - 1 + offset;

    // If there's no lines surrounding this hunk - it will always succeed,
    // so there is no point in checking any further. Note that this check is
    // also what makes matching against an empty 'from file' work (with no lines),
    // as in that case there is no content for us to even match against in the
    // first place!
    //
    // However, we also should reject patches being added when the hunk is
    // claiming the file is completely empty - but there are actually lines in
    // that file.
    if (hunk.location.old_range.number_of_lines == 0) {
        if (hunk.location.old_range.start_line == 0 && !content.is_empty())
            return {};
        return Location { offset_guess, 0, 0 };
    }

    // Count the run of context lines at the start of the hunk...
    size_t patch_prefix_context = 0;
    for (auto const& line : hunk.lines) {
        if (line.operation != Line::Operation::Context)
            break;
        ++patch_prefix_context;
    }

    // ...and at the end of the hunk; these are the lines fuzz is allowed to drop.
    size_t patch_suffix_context = 0;
    for (auto const& line : hunk.lines.in_reverse()) {
        if (line.operation != Line::Operation::Context)
            break;
        ++patch_suffix_context;
    }

    size_t context = max(patch_prefix_context, patch_suffix_context);

    // Look through the file trying to match the hunk for it. If we can't find anything anywhere in the file, then try and
    // match the hunk by ignoring an increasing amount of context lines. The number of context lines that are ignored is
    // called the 'fuzz'.
    for (size_t fuzz = 0; fuzz <= max_fuzz; ++fuzz) {
        // Only shrink the side(s) that actually have that much context to spare.
        auto suffix_fuzz = (patch_suffix_context >= context) ? (fuzz + patch_suffix_context - context) : 0;
        auto prefix_fuzz = (patch_prefix_context >= context) ? (fuzz + patch_prefix_context - context) : 0;

        // If the fuzz is greater than the total number of lines for a hunk, then it may be possible for the hunk to match anything.
        if (suffix_fuzz + prefix_fuzz >= hunk.lines.size())
            return {};

        auto hunk_matches_starting_from_line = [&](size_t line) {
            line += prefix_fuzz;

            // Ensure that all of the lines in the hunk match starting from 'line', ignoring the specified number of context lines.
            return all_of(hunk.lines.begin() + prefix_fuzz, hunk.lines.end() - suffix_fuzz, [&](Line const& hunk_line) {
                // Ignore additions in our increment of line and comparison as they are not part of the 'original file'
                if (hunk_line.operation == Line::Operation::Addition)
                    return true;

                if (line >= content.size())
                    return false;

                if (content[line] != hunk_line.content)
                    return false;

                ++line;
                return true;
            });
        };

        // Scan forward from the guessed position first...
        for (size_t line = offset_guess; line < content.size(); ++line) {
            if (hunk_matches_starting_from_line(line))
                return Location { line, fuzz, static_cast<ssize_t>(line - offset_guess) };
        }

        // ...then backward toward the start of the file.
        for (size_t line = offset_guess; line != 0; --line) {
            if (hunk_matches_starting_from_line(line - 1))
                return Location { line - 1, fuzz, static_cast<ssize_t>(line - offset_guess) };
        }
    }

    // No bueno.
    return {};
}
|
||||
|
||||
// Writes a single hunk to 'out', merging it with the old file's lines starting at
// 'location'. Returns the old-file line number consumed up to, so the caller can
// continue writing the remainder of the file from there.
static ErrorOr<size_t> write_hunk(Stream& out, Hunk const& hunk, Location const& location, Vector<StringView> const& lines)
{
    auto current_line = location.line_number;

    for (auto const& patch_line : hunk.lines) {
        switch (patch_line.operation) {
        case Line::Operation::Context:
            // Context lines are emitted from the old file at the matched position.
            TRY(out.write_formatted("{}\n", lines.at(current_line)));
            ++current_line;
            break;
        case Line::Operation::Addition:
            // Added lines come straight from the patch.
            TRY(out.write_formatted("{}\n", patch_line.content));
            break;
        case Line::Operation::Removal:
            // Removed lines are skipped over in the old file and never written.
            ++current_line;
            break;
        default:
            break;
        }
    }

    return current_line;
}
|
||||
|
||||
// Writes a hunk wrapped in C preprocessor conditionals instead of applying it
// destructively: removed lines are preserved under '#ifndef <define>' and added lines
// placed under '#ifdef <define>', so the result contains both variants (similar in
// spirit to patch(1)'s --ifdef option).
//
// Returns the old-file line number consumed up to, so the caller can continue from there.
static ErrorOr<size_t> write_define_hunk(Stream& out, Hunk const& hunk, Location const& location, Vector<StringView> const& lines, StringView define)
{
    // Which preprocessor branch we are currently emitting into.
    enum class State {
        Outside,
        InsideIFNDEF,
        InsideIFDEF,
        InsideELSE,
    };

    auto state = State::Outside;

    auto line_number = location.line_number;

    for (auto const& patch_line : hunk.lines) {
        if (patch_line.operation == Diff::Line::Operation::Context) {
            auto const& line = lines.at(line_number);
            ++line_number;
            // A context line terminates any open conditional before being written verbatim.
            if (state != State::Outside) {
                TRY(out.write_formatted("#endif\n"));
                state = State::Outside;
            }
            TRY(out.write_formatted("{}\n", line));
        } else if (patch_line.operation == Diff::Line::Operation::Addition) {
            // Additions open an '#ifdef'. If we were already inside an '#ifndef'
            // (from removals), switch to its '#else' branch so both variants
            // share a single conditional.
            if (state == State::Outside) {
                state = State::InsideIFDEF;
                TRY(out.write_formatted("#ifdef {}\n", define));
            } else if (state == State::InsideIFNDEF) {
                state = State::InsideELSE;
                TRY(out.write_formatted("#else\n"));
            }
            TRY(out.write_formatted("{}\n", patch_line.content));
        } else if (patch_line.operation == Diff::Line::Operation::Removal) {
            auto const& line = lines.at(line_number);
            ++line_number;

            // Removals are preserved under '#ifndef'. If we were already inside an
            // '#ifdef' (from additions), switch to its '#else' branch instead.
            if (state == State::Outside) {
                state = State::InsideIFNDEF;
                TRY(out.write_formatted("#ifndef {}\n", define));
            } else if (state == State::InsideIFDEF) {
                state = State::InsideELSE;
                TRY(out.write_formatted("#else\n"));
            }
            TRY(out.write_formatted("{}\n", line));
        }
    }

    // Close a conditional still open when the hunk ends without trailing context.
    if (state != State::Outside)
        TRY(out.write_formatted("#endif\n"));

    return line_number;
}
|
||||
|
||||
// Applies every hunk of 'patch' to the old file content in 'lines', writing the patched
// result to 'out'. If 'define' is provided, changes are emitted inside preprocessor
// conditionals on that name (see write_define_hunk) instead of being applied destructively.
//
// Returns an error if any hunk cannot be located in the input.
ErrorOr<void> apply_patch(Stream& out, Vector<StringView> const& lines, Patch const& patch, Optional<StringView> const& define)
{
    size_t line_number = 0; // NOTE: relative to 'old' file.
    ssize_t offset_error = 0;

    for (size_t hunk_num = 0; hunk_num < patch.hunks.size(); ++hunk_num) {
        auto const& hunk = patch.hunks[hunk_num];

        // Earlier hunks may have matched away from their stated position; the
        // accumulated drift is used as the search hint for this hunk.
        auto maybe_location = locate_hunk(lines, hunk, offset_error);
        if (!maybe_location.has_value())
            return Error::from_string_literal("Failed to locate where to apply patch");

        auto location = *maybe_location;
        offset_error += location.offset;

        // Write up until where we have found this latest hunk from the old file.
        for (; line_number < location.line_number; ++line_number)
            TRY(out.write_formatted("{}\n", lines.at(line_number)));

        // Then output the hunk to what we hope is the correct location in the file.
        if (define.has_value())
            line_number = TRY(write_define_hunk(out, hunk, location, lines, define.value()));
        else
            line_number = TRY(write_hunk(out, hunk, location, lines));
    }

    // We've finished applying all hunks, write out anything from the old file we haven't already.
    for (; line_number < lines.size(); ++line_number)
        TRY(out.write_formatted("{}\n", lines[line_number]));

    return {};
}
|
||||
|
||||
}
|
|
@ -1,16 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2023-2024, Shannon Booth <shannon@serenityos.org>
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <AK/Error.h>
|
||||
#include <LibDiff/Forward.h>
|
||||
|
||||
namespace Diff {
|
||||
|
||||
ErrorOr<void> apply_patch(Stream& out, Vector<StringView> const& lines, Patch const& patch, Optional<StringView> const& define = {});
|
||||
|
||||
}
|
|
@ -1,9 +1,6 @@
|
|||
|
||||
set(SOURCES
|
||||
Applier.cpp
|
||||
Format.cpp
|
||||
Generator.cpp
|
||||
Hunks.cpp
|
||||
)
|
||||
|
||||
serenity_lib(LibDiff diff)
|
||||
|
|
|
@ -7,24 +7,11 @@
|
|||
*/
|
||||
|
||||
#include "Format.h"
|
||||
#include <AK/Assertions.h>
|
||||
#include <AK/ByteString.h>
|
||||
#include <AK/Stream.h>
|
||||
#include <AK/StringBuilder.h>
|
||||
#include <AK/Vector.h>
|
||||
#include <LibDiff/Hunks.h>
|
||||
|
||||
namespace Diff {
|
||||
// Produces a unified-format hunk body that adds every line of 'text' to an empty file.
ByteString generate_only_additions(StringView text)
{
    auto added_lines = text.split_view('\n', SplitBehavior::KeepEmpty);

    StringBuilder diff;
    // An old range of '-0,0' indicates the original file had no content at all.
    diff.appendff("@@ -0,0 +1,{} @@\n", added_lines.size());
    for (auto const& added_line : added_lines)
        diff.appendff("+{}\n", added_line);

    return diff.to_byte_string();
}
|
||||
|
||||
ErrorOr<void> write_unified_header(StringView old_path, StringView new_path, Stream& stream)
|
||||
{
|
||||
|
@ -55,168 +42,4 @@ ErrorOr<void> write_unified(Hunk const& hunk, Stream& stream, ColorOutput color_
|
|||
return {};
|
||||
}
|
||||
|
||||
// Writes 'hunk' in "normal" diff format: a change-command line such as '3,4c3,4'
// followed by '<' (old file) and '>' (new file) content lines.
ErrorOr<void> write_normal(Hunk const& hunk, Stream& stream, ColorOutput color_output)
{
    // Writes 'start' or 'start,end' for a range, omitting the end for single lines.
    auto write_range = [&stream](auto const& range) -> ErrorOr<void> {
        TRY(stream.write_formatted("{}", range.start_line));
        if (range.number_of_lines > 1)
            TRY(stream.write_formatted(",{}", range.start_line + range.number_of_lines - 1));
        return {};
    };

    // Source line(s).
    TRY(write_range(hunk.location.old_range));

    // Action: 'c'hange when both sides have lines, 'a'dd when only the new side
    // does, otherwise 'd'elete.
    char action = 'd';
    if (hunk.location.old_range.number_of_lines > 0 && hunk.location.new_range.number_of_lines > 0)
        action = 'c';
    else if (hunk.location.new_range.number_of_lines > 0)
        action = 'a';
    TRY(stream.write_formatted("{}", action));

    // Target line(s).
    TRY(write_range(hunk.location.new_range));

    TRY(stream.write_formatted("\n"));

    for (auto const& line : hunk.lines) {
        // Normal format has no context lines; every line is an addition or removal.
        VERIFY(line.operation == Line::Operation::Removal || line.operation == Line::Operation::Addition);

        bool const is_addition = line.operation == Line::Operation::Addition;
        char const marker = is_addition ? '>' : '<';
        if (color_output == ColorOutput::Yes) {
            // Additions in green, removals in red.
            auto color = is_addition ? "\033[32;1m"sv : "\033[31;1m"sv;
            TRY(stream.write_formatted("{}{} {}\033[0m\n", color, marker, line.content));
        } else {
            TRY(stream.write_formatted("{} {}\n", marker, line.content));
        }
    }

    return {};
}
|
||||
|
||||
// A hunk's lines separated into the old file's view and the new file's view,
// as required by the context diff format.
struct SplitLines {
    Vector<Line> old_lines;
    Vector<Line> new_lines;
};

// Splits a unified hunk into separate old/new line lists for context-format output.
// Where a run of removals is adjacent to a run of additions (with no context line in
// between), both runs are re-marked as 'Change' ('!') lines on their respective sides.
// If the hunk is purely additions (or purely deletions), the untouched side is
// returned empty so its section body can be omitted entirely.
static ErrorOr<SplitLines> split_hunk_into_old_and_new_lines(Hunk const& hunk)
{
    // Size of each side at the last context line seen; lines appended after these
    // indices belong to the current run of edits and may be re-marked as changes.
    size_t new_lines_last_context = 0;
    size_t old_lines_last_context = 0;
    SplitLines lines;

    // Operation of the run currently being accumulated.
    auto operation = Line::Operation::Context;

    bool is_all_insertions = true;
    bool is_all_deletions = true;

    auto check_if_line_is_a_change = [&](Line::Operation op) {
        if (operation != op) {
            // We've switched from additions to removals or vice-versa.
            // All lines starting from the last context line we saw must be changes.
            operation = Line::Operation::Change;
            for (size_t i = new_lines_last_context; i < lines.new_lines.size(); ++i)
                lines.new_lines[i].operation = Line::Operation::Change;
            for (size_t i = old_lines_last_context; i < lines.old_lines.size(); ++i)
                lines.old_lines[i].operation = Line::Operation::Change;
        }
    };

    for (auto const& line : hunk.lines) {
        switch (line.operation) {
        case Line::Operation::Context:
            VERIFY(lines.old_lines.size() < hunk.location.old_range.number_of_lines);
            VERIFY(lines.new_lines.size() < hunk.location.new_range.number_of_lines);

            // Context lines appear unchanged on both sides and end the current run.
            operation = Line::Operation::Context;
            TRY(lines.new_lines.try_append(Line { operation, line.content }));
            TRY(lines.old_lines.try_append(Line { operation, line.content }));
            new_lines_last_context = lines.new_lines.size();
            old_lines_last_context = lines.old_lines.size();
            break;
        case Line::Operation::Addition:
            VERIFY(lines.new_lines.size() < hunk.location.new_range.number_of_lines);

            // Only additions appear on the new side; an adjacent removal run turns both into changes.
            if (operation != Line::Operation::Context)
                check_if_line_is_a_change(Line::Operation::Addition);
            else
                operation = Line::Operation::Addition;

            TRY(lines.new_lines.try_append(Line { operation, line.content }));
            is_all_deletions = false;
            break;
        case Line::Operation::Removal:
            VERIFY(lines.old_lines.size() < hunk.location.old_range.number_of_lines);

            // Only removals appear on the old side; an adjacent addition run turns both into changes.
            if (operation != Line::Operation::Context)
                check_if_line_is_a_change(Line::Operation::Removal);
            else
                operation = Line::Operation::Removal;

            TRY(lines.old_lines.try_append(Line { operation, line.content }));
            is_all_insertions = false;
            break;
        default:
            VERIFY_NOT_REACHED();
        }
    }

    // The hunk must account for exactly the number of lines its location promised.
    VERIFY(lines.new_lines.size() == hunk.location.new_range.number_of_lines && lines.old_lines.size() == hunk.location.old_range.number_of_lines);

    if (is_all_insertions)
        lines.old_lines.clear();
    else if (is_all_deletions)
        lines.new_lines.clear();

    return lines;
}
|
||||
|
||||
// Writes the two sections of a context-format hunk: the old file's lines under a
// '*** start,end ****' header and the new file's lines under a '--- start,end ----' header.
static ErrorOr<void> write_hunk_as_context(Vector<Line> const& old_lines, Vector<Line> const& new_lines, HunkLocation const& location, Stream& stream, ColorOutput color_output)
{
    // Writes one section: the range header, then each line prefixed with its operation
    // symbol, colorizing edited lines when color output was requested.
    auto write_section = [&](StringView header_prefix, StringView header_suffix, auto const& range, Vector<Line> const& section_lines, Line::Operation edit_operation, StringView color) -> ErrorOr<void> {
        TRY(stream.write_formatted("{}{}", header_prefix, range.start_line));

        if (range.number_of_lines > 1)
            TRY(stream.write_formatted(",{}", range.start_line + range.number_of_lines - 1));

        TRY(stream.write_formatted("{}", header_suffix));

        for (auto const& line : section_lines) {
            bool const is_edit = line.operation == edit_operation || line.operation == Line::Operation::Change;
            if (color_output == ColorOutput::Yes && is_edit)
                TRY(stream.write_formatted("{}{} {}\033[0m\n", color, line.operation, line.content));
            else
                TRY(stream.write_formatted("{} {}\n", line.operation, line.content));
        }

        return {};
    };

    // Old side: removals/changes highlighted in red.
    TRY(write_section("*** "sv, " ****\n"sv, location.old_range, old_lines, Line::Operation::Removal, "\033[31;1m"sv));
    // New side: additions/changes highlighted in green.
    TRY(write_section("--- "sv, " ----\n"sv, location.new_range, new_lines, Line::Operation::Addition, "\033[32;1m"sv));

    return {};
}
|
||||
|
||||
// Emits 'hunk' in context format: the hunk is first split into its old-file and
// new-file views, which are then written out as the two context sections.
ErrorOr<void> write_context(Hunk const& hunk, Stream& stream, ColorOutput color_output)
{
    auto const split = TRY(split_hunk_into_old_and_new_lines(hunk));
    return write_hunk_as_context(split.old_lines, split.new_lines, hunk.location, stream, color_output);
}
|
||||
|
||||
// Writes the two file-name lines of a context diff, followed by the separator
// that precedes the first hunk.
ErrorOr<void> write_context_header(StringView old_path, StringView new_path, Stream& stream)
{
    TRY(stream.write_formatted("*** {}\n--- {}\n", old_path, new_path));
    return stream.write_formatted("***************\n");
}
|
||||
|
||||
}
|
||||
|
|
|
@ -13,8 +13,6 @@
|
|||
|
||||
namespace Diff {
|
||||
|
||||
ByteString generate_only_additions(StringView);
|
||||
|
||||
enum class ColorOutput {
|
||||
Yes,
|
||||
No,
|
||||
|
@ -23,8 +21,4 @@ enum class ColorOutput {
|
|||
ErrorOr<void> write_unified(Hunk const& hunk, Stream& stream, ColorOutput color_output = ColorOutput::No);
|
||||
ErrorOr<void> write_unified_header(StringView old_path, StringView new_path, Stream& stream);
|
||||
|
||||
ErrorOr<void> write_normal(Hunk const& hunk, Stream& stream, ColorOutput color_output = ColorOutput::No);
|
||||
|
||||
ErrorOr<void> write_context(Hunk const& hunk, Stream& stream, ColorOutput color_output = ColorOutput::No);
|
||||
ErrorOr<void> write_context_header(StringView old_path, StringView new_path, Stream& stream);
|
||||
}
|
||||
|
|
|
@ -12,11 +12,9 @@ enum class Format;
|
|||
|
||||
class Parser;
|
||||
|
||||
struct Header;
|
||||
struct Hunk;
|
||||
struct HunkLocation;
|
||||
struct Line;
|
||||
struct Patch;
|
||||
struct Range;
|
||||
|
||||
}
|
||||
|
|
|
@ -1,196 +0,0 @@
|
|||
/*
|
||||
* Copyright (c) 2020, Itamar S. <itamar8910@gmail.com>
|
||||
* Copyright (c) 2023, Shannon Booth <shannon@serenityos.org>
|
||||
*
|
||||
* SPDX-License-Identifier: BSD-2-Clause
|
||||
*/
|
||||
|
||||
#include "Hunks.h"
|
||||
#include <AK/Debug.h>
|
||||
#include <AK/LexicalPath.h>
|
||||
|
||||
namespace Diff {
|
||||
|
||||
// Consumes a unified hunk location of the form '@@ -start[,count] +start[,count] @@'
// from the lexer. Returns an empty Optional if the input does not match.
Optional<HunkLocation> Parser::consume_unified_location()
{
    // Parses 'start[,count]'; a missing count means the range covers a single line.
    auto consume_range = [this](Range& range) {
        if (!consume_line_number(range.start_line))
            return false;

        if (!consume_specific(','))
            range.number_of_lines = 1;
        else if (!consume_line_number(range.number_of_lines))
            return false;

        return true;
    };

    HunkLocation location;

    // Short-circuiting keeps the consumption order identical to parsing each
    // piece one after the other.
    if (!consume_specific("@@ -"sv)
        || !consume_range(location.old_range)
        || !consume_specific(" +"sv)
        || !consume_range(location.new_range)
        || !consume_specific(" @@"sv)) {
        return {};
    }

    return location;
}
|
||||
|
||||
// Consumes a run of ASCII digits and stores the parsed value in 'number'.
// Returns false (leaving 'number' untouched) when there are no digits or the
// value does not fit in a size_t.
bool Parser::consume_line_number(size_t& number)
{
    auto digits = consume_while(is_ascii_digit);

    auto parsed = digits.to_number<size_t>();
    if (!parsed.has_value())
        return false;

    number = *parsed;
    return true;
}
|
||||
|
||||
// Parses the file path out of a '--- '/'+++ ' header line.
//
// With no strip count, returns just the basename of the path. Otherwise removes
// 'strip_count' leading path components (like patch(1)'s -p option); if the path
// has that many components or fewer, an empty string is returned and the file is
// treated as missing.
ErrorOr<String> Parser::parse_file_line(Optional<size_t> const& strip_count)
{
    // FIXME: handle parsing timestamps as well.
    auto line = consume_line();

    // The path is everything up to the (optional) tab-separated timestamp.
    GenericLexer line_parser(line);
    auto path = line_parser.consume_until('\t');

    // No strip count given. Default to basename of file.
    if (!strip_count.has_value())
        return String::from_byte_string(LexicalPath::basename(path));

    // NOTE: We cannot use LexicalPath::parts as we want to strip the non-canonicalized path.
    auto const& parts = path.split_view('/');

    // More components to strip than the filename has. Just pretend it is missing.
    if (strip_count.value() >= parts.size())
        return String();

    // Remove given number of leading components from the path.
    size_t components = parts.size() - strip_count.value();

    StringBuilder stripped_path;
    for (size_t i = parts.size() - components; i < parts.size(); ++i) {
        TRY(stripped_path.try_append(parts[i]));
        // Re-join the remaining components with '/', without a trailing separator.
        if (i != parts.size() - 1)
            TRY(stripped_path.try_append("/"sv));
    }

    return stripped_path.to_string();
}
|
||||
|
||||
// Parses a complete patch: first the file header, then every hunk that follows.
// 'strip_count' is forwarded to the path handling of the header lines.
ErrorOr<Patch> Parser::parse_patch(Optional<size_t> const& strip_count)
{
    Patch result;
    result.header = TRY(parse_header(strip_count));
    result.hunks = TRY(parse_hunks());
    return result;
}
|
||||
|
||||
// Scans forward for the '--- '/'+++ ' file header lines, stopping (and tagging the
// header as unified) as soon as the first hunk marker is seen. Any other lines
// before the first hunk are skipped.
ErrorOr<Header> Parser::parse_header(Optional<size_t> const& strip_count)
{
    Header header;

    while (!is_eof()) {
        if (consume_specific("+++ "sv)) {
            header.new_file_path = TRY(parse_file_line(strip_count));
        } else if (consume_specific("--- "sv)) {
            header.old_file_path = TRY(parse_file_line(strip_count));
        } else if (next_is("@@ ")) {
            // A hunk marker terminates the header without consuming the marker itself.
            header.format = Format::Unified;
            return header;
        } else {
            consume_line();
        }
    }

    return header;
}
|
||||
|
||||
// Parses all unified-format hunks at the current lexer position.
//
// Each hunk begins with an '@@' location line and is followed by exactly the number
// of context/addition/removal lines its ranges promise. Parsing stops at the first
// line that is not a hunk header; a malformed hunk body is an error.
ErrorOr<Vector<Hunk>> Parser::parse_hunks()
{
    Vector<Hunk> hunks;

    while (next_is("@@ ")) {
        // Try and locate a hunk location in this hunk. It may be prefixed with information.
        auto maybe_location = consume_unified_location();
        consume_line();

        if (!maybe_location.has_value())
            break;

        Hunk hunk { *maybe_location, {} };

        auto old_lines_expected = hunk.location.old_range.number_of_lines;
        auto new_lines_expected = hunk.location.new_range.number_of_lines;

        // We've found a location. Now parse out all of the expected content lines.
        while (old_lines_expected != 0 || new_lines_expected != 0) {
            StringView line = consume_line();

            if (line.is_empty())
                return Error::from_string_literal("Malformed empty content line in patch");

            if (line[0] != ' ' && line[0] != '+' && line[0] != '-')
                return Error::from_string_literal("Invalid operation in patch");

            auto const operation = Line::operation_from_symbol(line[0]);

            // Additions and context lines each consume a line of the new file's budget...
            if (operation != Line::Operation::Removal) {
                if (new_lines_expected == 0)
                    return Error::from_string_literal("Found more addition and context lines in patch than expected");

                --new_lines_expected;
            }

            // ...while removals and context lines consume a line of the old file's budget.
            if (operation != Line::Operation::Addition) {
                if (old_lines_expected == 0)
                    return Error::from_string_literal("Found more removal and context lines in patch than expected");

                --old_lines_expected;
            }

            // Strip the leading operation symbol to recover the line's content.
            auto const content = line.substring_view(1, line.length() - 1);
            TRY(hunk.lines.try_append(Line { operation, TRY(String::from_utf8(content)) }));
        }

        TRY(hunks.try_append(hunk));
    }

    if constexpr (HUNKS_DEBUG) {
        for (auto const& hunk : hunks) {
            dbgln("{}", hunk.location);
            for (auto const& line : hunk.lines)
                dbgln("{}", line);
        }
    }

    return hunks;
}
|
||||
|
||||
// Convenience entry point: skips any leading non-hunk lines (e.g. a header) in 'diff'
// and parses all hunks that follow.
ErrorOr<Vector<Hunk>> parse_hunks(StringView diff)
{
    Parser parser(diff);
    while (!parser.is_eof() && !parser.next_is("@@ "))
        parser.consume_line();
    return parser.parse_hunks();
}
|
||||
}
|
|
@ -9,7 +9,6 @@
|
|||
|
||||
#include <AK/Assertions.h>
|
||||
#include <AK/Format.h>
|
||||
#include <AK/GenericLexer.h>
|
||||
#include <AK/String.h>
|
||||
#include <AK/StringView.h>
|
||||
#include <AK/Vector.h>
|
||||
|
@ -31,9 +30,6 @@ struct Line {
|
|||
Addition = '+',
|
||||
Removal = '-',
|
||||
Context = ' ',
|
||||
|
||||
// NOTE: This should only be used when deconstructing a hunk into old and new lines (context format)
|
||||
Change = '!',
|
||||
};
|
||||
|
||||
static constexpr Operation operation_from_symbol(char symbol)
|
||||
|
@ -59,41 +55,6 @@ struct Hunk {
|
|||
Vector<Line> lines;
|
||||
};
|
||||
|
||||
// The diff flavor a parsed header was recognized as.
enum class Format {
    Unified,
    Unknown,
};

// The file-level header of a patch: which format it is in, and the
// old/new file paths parsed from the '---'/'+++' lines.
struct Header {
    Format format { Format::Unknown };

    String old_file_path;
    String new_file_path;
};

// A complete parsed patch: its header plus all of its hunks.
struct Patch {
    Header header;
    Vector<Hunk> hunks;
};

// Lexer-based parser for unified diffs.
class Parser : public GenericLexer {
public:
    using GenericLexer::GenericLexer;

    // Parses a whole patch (header followed by hunks). 'strip_count' removes that
    // many leading path components from the header paths.
    ErrorOr<Patch> parse_patch(Optional<size_t> const& strip_count = {});

    // Parses all hunks at the current lexer position.
    ErrorOr<Vector<Hunk>> parse_hunks();

private:
    ErrorOr<Header> parse_header(Optional<size_t> const& strip_count);

    ErrorOr<String> parse_file_line(Optional<size_t> const& strip_count);
    Optional<HunkLocation> consume_unified_location();
    bool consume_line_number(size_t& number);
};

// Convenience helper: skips leading non-hunk lines in 'diff' and parses all hunks.
ErrorOr<Vector<Hunk>> parse_hunks(StringView diff);
|
||||
|
||||
}
|
||||
|
||||
template<>
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue