[gnome-boxes/mcatanzaro/libsoup3] Port to libsoup 3
- From: Michael Catanzaro <mcatanzaro src gnome org>
- To: commits-list gnome org
- Cc:
- Subject: [gnome-boxes/mcatanzaro/libsoup3] Port to libsoup 3
- Date: Fri, 24 Jun 2022 21:54:46 +0000 (UTC)
commit d7c6d77fd0c266ac4836ea28745635e59d35ad29
Author: Michael Catanzaro <mcatanzaro redhat com>
Date: Fri Jun 24 16:50:13 2022 -0500
Port to libsoup 3
We are trying to remove the libsoup 2 build of WebKitGTK for the GNOME
43.alpha release. Since Boxes depends on WebKitGTK, this means it must
either port to libsoup 3 or else drop either the WebKitGTK dependency
or the libsoup dependency. Porting to libsoup 3 seems easy enough.
Note this will crash at runtime if linked to anything that uses libsoup
2. Building libosinfo with libsoup 3 is therefore mandatory.
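For anyone doing a similar port: the patch below boils down to two libsoup 2 idioms
being replaced. Soup.URI becomes GLib.Uri (whose parse () can throw), and
session.queue_message () with got_chunk handlers becomes session.send_async (),
which hands back a GLib.InputStream to read from. A rough sketch of the new shape,
with placeholder names rather than the exact Boxes code:

    // Sketch only; identifiers here are placeholders, not the Boxes API.
    private async void fetch_to_stream (Soup.Session session, string url,
                                        OutputStream output,
                                        Cancellable? cancellable) throws GLib.Error {
        // Soup.URI is gone; Uri.parse () throws UriError on malformed input.
        var uri = Uri.parse (url, UriFlags.NONE);
        debug ("query: %s", uri.get_query () ?? "(none)");

        // queue_message ()/got_chunk () are gone; send_async () returns an InputStream.
        var msg = new Soup.Message ("GET", url);
        var body = yield session.send_async (msg, Priority.DEFAULT, cancellable);
        while (true) {
            var buffer = new uint8[2048];
            var num_read = yield body.read_async (buffer, Priority.DEFAULT, cancellable);
            if (num_read <= 0)
                break;
            // write_all_async () takes the byte count from the array length.
            buffer.resize ((int) num_read);
            size_t written;
            yield output.write_all_async (buffer, Priority.DEFAULT, cancellable, out written);
        }
        if (msg.status_code != Soup.Status.OK)
            throw new IOError.FAILED ("HTTP error: %s", msg.reason_phrase);
    }

Note the status check still happens after the body has been read: send_async () only
raises for transport errors, not HTTP-level failures, which is why the real patch keeps
its own check against Soup.Status.OK.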
src/assistant/rhel-download-dialog.vala | 10 ++++--
src/downloader.vala | 61 +++++++++++++--------------------
src/downloads-hub.vala | 13 ++++---
src/meson.build | 4 +--
4 files changed, 42 insertions(+), 46 deletions(-)
---
diff --git a/src/assistant/rhel-download-dialog.vala b/src/assistant/rhel-download-dialog.vala
index 0b590183..c3c5d565 100644
--- a/src/assistant/rhel-download-dialog.vala
+++ b/src/assistant/rhel-download-dialog.vala
@@ -63,8 +63,14 @@ private bool on_decide_policy (WebKit.WebView web_view,
!request_uri.has_prefix ("https://access.cdn.redhat.com"))
return false;
- var soup_request_uri = new Soup.URI (request_uri);
- var query = soup_request_uri.get_query ();
+ Uri? uri = null;
+ try {
+ uri = Uri.parse (request_uri, UriFlags.NONE);
+ } catch (UriError error) {
+ return false;
+ }
+
+ var query = uri.get_query ();
if (query == null)
return false;
diff --git a/src/downloader.vala b/src/downloader.vala
index 90b03db6..d501423f 100644
--- a/src/downloader.vala
+++ b/src/downloader.vala
@@ -62,7 +62,7 @@ private Downloader () {
session = new Soup.Session ();
if (Environment.get_variable ("SOUP_DEBUG") != null)
- session.add_feature (new Soup.Logger (Soup.LoggerLogLevel.HEADERS, -1));
+ session.add_feature (new Soup.Logger (Soup.LoggerLogLevel.HEADERS));
// As some websites redirect based on UA, lets force wget user-agent so the
// website assumes it's a CLI tool downloading the file.
@@ -127,19 +127,11 @@ public async File download (File remote_file,
private async void download_from_http (Download download, Cancellable? cancellable = null) throws GLib.Error {
var msg = new Soup.Message ("GET", download.uri);
- msg.response_body.set_accumulate (false);
- var address = msg.get_address ();
- var connectable = new NetworkAddress (address.name, (uint16) address.port);
+ var connectable = msg.get_remote_address ();
var network_monitor = NetworkMonitor.get_default ();
if (!(yield network_monitor.can_reach_async (connectable)))
- throw new Boxes.Error.INVALID ("Failed to reach host '%s' on port '%d'", address.name, address.port);
+ throw new Boxes.Error.INVALID ("Failed to connect to '%s'", connectable.to_string());
GLib.Error? err = null;
- ulong cancelled_id = 0;
- if (cancellable != null)
- cancelled_id = cancellable.connect (() => {
- err = new GLib.IOError.CANCELLED ("Cancelled by cancellable.");
- session.cancel_message (msg, Soup.Status.CANCELLED);
- });
int64 total_num_bytes = 0;
msg.got_headers.connect (() => {
@@ -151,41 +143,36 @@ private async void download_from_http (Download download, Cancellable? cancellab
FileCreateFlags.REPLACE_DESTINATION);
int64 current_num_bytes = 0;
- // FIXME: Reduce lambda nesting by splitting out downloading to Download class
- msg.got_chunk.connect ((msg, chunk) => {
- if (session.would_redirect (msg))
- return;
+ try {
+ var input_stream = yield session.send_async (msg, Priority.DEFAULT, cancellable);
+ while (true) {
+ uint8[] buffer = new uint8[2048];
+ var num_read = yield input_stream.read_async (buffer, Priority.DEFAULT, cancellable);
+ if (num_read == 0)
+ break;
+ // read_async() should throw on error.
+ assert (num_read > 0);
+ current_num_bytes += num_read;
+
+ // The resize is needed because the Vala bindings for writing
+ // take the number of bytes to write via the array's length.
+ buffer.resize((int)num_read);
+ size_t bytes_written;
+ yield cached_file_stream.write_all_async (buffer, Priority.DEFAULT, cancellable, out bytes_written);
- current_num_bytes += chunk.length;
- try {
- // Write synchronously as we have no control over order of async
- // calls and we'll end up writing bytes out in wrong order. Besides
- // we are writing small chunks so it wouldn't really block the UI.
- cached_file_stream.write (chunk.data);
if (total_num_bytes > 0)
- // Don't report progress if there is no way to determine it
download.progress.progress = (double) current_num_bytes / total_num_bytes;
- } catch (GLib.Error e) {
- err = e;
- session.cancel_message (msg, Soup.Status.CANCELLED);
}
- });
- session.queue_message (msg, (session, msg) => {
- download_from_http.callback ();
- });
-
- yield;
-
- if (cancelled_id != 0)
- cancellable.disconnect (cancelled_id);
-
- yield cached_file_stream.close_async (Priority.DEFAULT, cancellable);
+ yield cached_file_stream.close_async (Priority.DEFAULT, cancellable);
+ } catch (GLib.Error e) {
+ err = e;
+ }
if (msg.status_code != Soup.Status.OK) {
download.cached_file.delete ();
if (err == null)
- err = new GLib.Error (Soup.http_error_quark (), (int)msg.status_code, msg.reason_phrase);
+ err = new GLib.Error (IOError.FAILED, (int)msg.status_code, msg.reason_phrase);
throw err;
}
diff --git a/src/downloads-hub.vala b/src/downloads-hub.vala
index df05631c..2d858a87 100644
--- a/src/downloads-hub.vala
+++ b/src/downloads-hub.vala
@@ -170,12 +170,15 @@ private bool draw_button_pie (Widget drawing_area, Cairo.Context context) {
});
progress_bar.fraction = progress.progress = 0;
- var soup_download_uri = new Soup.URI (entry.url);
- var download_path = soup_download_uri.get_path ();
-
- var filename = GLib.Path.get_basename (download_path);
+ try {
+ var download_uri = Uri.parse (entry.url, UriFlags.NONE);
+ var download_path = download_uri.get_path ();
+ var filename = GLib.Path.get_basename (download_path);
- download.begin (entry.url, filename);
+ download.begin (entry.url, filename);
+ } catch (UriError error) {
+ App.app.main_window.display_toast (new Boxes.Toast (_("Failed to download: %s").printf (error.message)));
+ }
}
private async void download (string url, string filename) {
diff --git a/src/meson.build b/src/meson.build
index e442406d..a9d3c192 100644
--- a/src/meson.build
+++ b/src/meson.build
@@ -108,13 +108,13 @@ dependencies = [
dependency ('libhandy-1', version: '>= 1.5.0'),
dependency ('libosinfo-1.0', version: '>= 1.7.0'),
dependency ('libsecret-1'),
- dependency ('libsoup-2.4', version: '>= 2.38'),
+ dependency ('libsoup-3.0'),
dependency ('libusb-1.0', version: '>= 1.0.9'),
dependency ('libvirt-gconfig-1.0', version: '>= 4.0.0'),
dependency ('libvirt-gobject-1.0', version: '>= 4.0.0'),
dependency ('libxml-2.0', version: '>= 2.7.8'),
dependency ('tracker-sparql-3.0'),
- dependency ('webkit2gtk-4.0', version: '>= 2.26.0'),
+ dependency ('webkit2gtk-4.1'),
cc.find_library('m', required : false),
valac.find_library ('gio-2.0-workaround', dirs: vapi_dir),
valac.find_library ('linux'),